From 2b89bead50bc3cf0bc32cae8f3f7108ff1d54724 Mon Sep 17 00:00:00 2001 From: fat Date: Thu, 29 Jan 2026 18:55:55 -0800 Subject: [PATCH 1/4] move sdks over --- LICENSE.md | 189 ++ packages/git-storage-sdk-go/README.md | 75 + packages/git-storage-sdk-go/client.go | 355 ++++ packages/git-storage-sdk-go/client_test.go | 170 ++ packages/git-storage-sdk-go/commit.go | 384 ++++ packages/git-storage-sdk-go/commit_pack.go | 146 ++ packages/git-storage-sdk-go/commit_test.go | 104 + packages/git-storage-sdk-go/diff_commit.go | 208 ++ packages/git-storage-sdk-go/errors.go | 90 + packages/git-storage-sdk-go/fetch.go | 129 ++ packages/git-storage-sdk-go/go.mod | 8 + packages/git-storage-sdk-go/go.sum | 4 + packages/git-storage-sdk-go/helpers_test.go | 54 + packages/git-storage-sdk-go/moon.yml | 14 + packages/git-storage-sdk-go/repo.go | 754 ++++++++ packages/git-storage-sdk-go/repo_test.go | 224 +++ packages/git-storage-sdk-go/requests.go | 141 ++ packages/git-storage-sdk-go/responses.go | 180 ++ packages/git-storage-sdk-go/types.go | 607 ++++++ packages/git-storage-sdk-go/util.go | 260 +++ packages/git-storage-sdk-go/version.go | 8 + packages/git-storage-sdk-go/webhook.go | 171 ++ packages/git-storage-sdk-node/.gitignore | 4 + packages/git-storage-sdk-node/AGENTS.md | 68 + packages/git-storage-sdk-node/CLAUDE.md | 1 + packages/git-storage-sdk-node/README.md | 676 +++++++ packages/git-storage-sdk-node/moon.yml | 44 + packages/git-storage-sdk-node/package.json | 40 + .../git-storage-sdk-node/src/commit-pack.ts | 128 ++ packages/git-storage-sdk-node/src/commit.ts | 434 +++++ .../git-storage-sdk-node/src/diff-commit.ts | 302 +++ packages/git-storage-sdk-node/src/errors.ts | 50 + packages/git-storage-sdk-node/src/fetch.ts | 153 ++ packages/git-storage-sdk-node/src/index.ts | 1391 ++++++++++++++ packages/git-storage-sdk-node/src/schemas.ts | 217 +++ .../git-storage-sdk-node/src/stream-utils.ts | 255 +++ packages/git-storage-sdk-node/src/types.ts | 634 +++++++ 
packages/git-storage-sdk-node/src/util.ts | 44 + packages/git-storage-sdk-node/src/version.ts | 8 + packages/git-storage-sdk-node/src/webhook.ts | 323 ++++ .../tests/commit-from-diff.test.ts | 343 ++++ .../git-storage-sdk-node/tests/commit.test.ts | 836 ++++++++ .../tests/full-workflow.js | 999 ++++++++++ .../git-storage-sdk-node/tests/index.test.ts | 1680 +++++++++++++++++ .../tests/version.test.ts | 64 + .../tests/webhook.test.ts | 370 ++++ packages/git-storage-sdk-node/tsconfig.json | 9 + .../git-storage-sdk-node/tsconfig.tsup.json | 13 + packages/git-storage-sdk-node/tsup.config.ts | 21 + .../git-storage-sdk-node/vitest.config.ts | 11 + packages/git-storage-sdk-python/.gitignore | 61 + .../git-storage-sdk-python/.python-version | 1 + .../git-storage-sdk-python/DEVELOPMENT.md | 298 +++ packages/git-storage-sdk-python/LICENSE | 189 ++ packages/git-storage-sdk-python/MANIFEST.in | 5 + .../git-storage-sdk-python/PROJECT_SUMMARY.md | 264 +++ packages/git-storage-sdk-python/PUBLISHING.md | 400 ++++ packages/git-storage-sdk-python/QUICKSTART.md | 230 +++ packages/git-storage-sdk-python/README.md | 790 ++++++++ packages/git-storage-sdk-python/moon.yml | 91 + .../pierre_storage/__init__.py | 90 + .../pierre_storage/auth.py | 77 + .../pierre_storage/client.py | 449 +++++ .../pierre_storage/commit.py | 524 +++++ .../pierre_storage/errors.py | 81 + .../pierre_storage/py.typed | 1 + .../pierre_storage/repo.py | 1290 +++++++++++++ .../pierre_storage/types.py | 670 +++++++ .../pierre_storage/version.py | 13 + .../pierre_storage/webhook.py | 240 +++ .../git-storage-sdk-python/pyproject.toml | 85 + .../git-storage-sdk-python/scripts/setup.sh | 13 + .../git-storage-sdk-python/tests/__init__.py | 1 + .../git-storage-sdk-python/tests/conftest.py | 28 + .../tests/test_client.py | 873 +++++++++ .../tests/test_commit.py | 678 +++++++ .../git-storage-sdk-python/tests/test_repo.py | 1415 ++++++++++++++ .../tests/test_version.py | 48 + .../tests/test_webhook.py | 172 ++ 
packages/git-storage-sdk-python/uv.lock | 1460 ++++++++++++++ 80 files changed, 23930 insertions(+) create mode 100644 LICENSE.md create mode 100644 packages/git-storage-sdk-go/README.md create mode 100644 packages/git-storage-sdk-go/client.go create mode 100644 packages/git-storage-sdk-go/client_test.go create mode 100644 packages/git-storage-sdk-go/commit.go create mode 100644 packages/git-storage-sdk-go/commit_pack.go create mode 100644 packages/git-storage-sdk-go/commit_test.go create mode 100644 packages/git-storage-sdk-go/diff_commit.go create mode 100644 packages/git-storage-sdk-go/errors.go create mode 100644 packages/git-storage-sdk-go/fetch.go create mode 100644 packages/git-storage-sdk-go/go.mod create mode 100644 packages/git-storage-sdk-go/go.sum create mode 100644 packages/git-storage-sdk-go/helpers_test.go create mode 100644 packages/git-storage-sdk-go/moon.yml create mode 100644 packages/git-storage-sdk-go/repo.go create mode 100644 packages/git-storage-sdk-go/repo_test.go create mode 100644 packages/git-storage-sdk-go/requests.go create mode 100644 packages/git-storage-sdk-go/responses.go create mode 100644 packages/git-storage-sdk-go/types.go create mode 100644 packages/git-storage-sdk-go/util.go create mode 100644 packages/git-storage-sdk-go/version.go create mode 100644 packages/git-storage-sdk-go/webhook.go create mode 100644 packages/git-storage-sdk-node/.gitignore create mode 100644 packages/git-storage-sdk-node/AGENTS.md create mode 120000 packages/git-storage-sdk-node/CLAUDE.md create mode 100644 packages/git-storage-sdk-node/README.md create mode 100644 packages/git-storage-sdk-node/moon.yml create mode 100644 packages/git-storage-sdk-node/package.json create mode 100644 packages/git-storage-sdk-node/src/commit-pack.ts create mode 100644 packages/git-storage-sdk-node/src/commit.ts create mode 100644 packages/git-storage-sdk-node/src/diff-commit.ts create mode 100644 packages/git-storage-sdk-node/src/errors.ts create mode 100644 
packages/git-storage-sdk-node/src/fetch.ts create mode 100644 packages/git-storage-sdk-node/src/index.ts create mode 100644 packages/git-storage-sdk-node/src/schemas.ts create mode 100644 packages/git-storage-sdk-node/src/stream-utils.ts create mode 100644 packages/git-storage-sdk-node/src/types.ts create mode 100644 packages/git-storage-sdk-node/src/util.ts create mode 100644 packages/git-storage-sdk-node/src/version.ts create mode 100644 packages/git-storage-sdk-node/src/webhook.ts create mode 100644 packages/git-storage-sdk-node/tests/commit-from-diff.test.ts create mode 100644 packages/git-storage-sdk-node/tests/commit.test.ts create mode 100644 packages/git-storage-sdk-node/tests/full-workflow.js create mode 100644 packages/git-storage-sdk-node/tests/index.test.ts create mode 100644 packages/git-storage-sdk-node/tests/version.test.ts create mode 100644 packages/git-storage-sdk-node/tests/webhook.test.ts create mode 100644 packages/git-storage-sdk-node/tsconfig.json create mode 100644 packages/git-storage-sdk-node/tsconfig.tsup.json create mode 100644 packages/git-storage-sdk-node/tsup.config.ts create mode 100644 packages/git-storage-sdk-node/vitest.config.ts create mode 100644 packages/git-storage-sdk-python/.gitignore create mode 100644 packages/git-storage-sdk-python/.python-version create mode 100644 packages/git-storage-sdk-python/DEVELOPMENT.md create mode 100644 packages/git-storage-sdk-python/LICENSE create mode 100644 packages/git-storage-sdk-python/MANIFEST.in create mode 100644 packages/git-storage-sdk-python/PROJECT_SUMMARY.md create mode 100644 packages/git-storage-sdk-python/PUBLISHING.md create mode 100644 packages/git-storage-sdk-python/QUICKSTART.md create mode 100644 packages/git-storage-sdk-python/README.md create mode 100644 packages/git-storage-sdk-python/moon.yml create mode 100644 packages/git-storage-sdk-python/pierre_storage/__init__.py create mode 100644 packages/git-storage-sdk-python/pierre_storage/auth.py create mode 100644 
packages/git-storage-sdk-python/pierre_storage/client.py create mode 100644 packages/git-storage-sdk-python/pierre_storage/commit.py create mode 100644 packages/git-storage-sdk-python/pierre_storage/errors.py create mode 100644 packages/git-storage-sdk-python/pierre_storage/py.typed create mode 100644 packages/git-storage-sdk-python/pierre_storage/repo.py create mode 100644 packages/git-storage-sdk-python/pierre_storage/types.py create mode 100644 packages/git-storage-sdk-python/pierre_storage/version.py create mode 100644 packages/git-storage-sdk-python/pierre_storage/webhook.py create mode 100644 packages/git-storage-sdk-python/pyproject.toml create mode 100755 packages/git-storage-sdk-python/scripts/setup.sh create mode 100644 packages/git-storage-sdk-python/tests/__init__.py create mode 100644 packages/git-storage-sdk-python/tests/conftest.py create mode 100644 packages/git-storage-sdk-python/tests/test_client.py create mode 100644 packages/git-storage-sdk-python/tests/test_commit.py create mode 100644 packages/git-storage-sdk-python/tests/test_repo.py create mode 100644 packages/git-storage-sdk-python/tests/test_version.py create mode 100644 packages/git-storage-sdk-python/tests/test_webhook.py create mode 100644 packages/git-storage-sdk-python/uv.lock diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 000000000..e2c2b0606 --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,189 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, and + distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by the + copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all other + entities that control, are controlled by, or are under common control with + that entity. For the purposes of this definition, "control" means (i) the + power, direct or indirect, to cause the direction or management of such + entity, whether by contract or otherwise, or (ii) ownership of fifty percent + (50%) or more of the outstanding shares, or (iii) beneficial ownership of + such entity. + + "You" (or "Your") shall mean an individual or Legal Entity exercising + permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation source, and + configuration files. + + "Object" form shall mean any form resulting from mechanical transformation + or translation of a Source form, including but not limited to compiled + object code, generated documentation, and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or Object form, + made available under the License, as indicated by a copyright notice that is + included in or attached to the work (an example is provided in the Appendix + below). + + "Derivative Works" shall mean any work, whether in Source or Object form, + that is based on (or derived from) the Work and for which the editorial + revisions, annotations, elaborations, or other modifications represent, as a + whole, an original work of authorship. For the purposes of this License, + Derivative Works shall not include works that remain separable from, or + merely link (or bind by name) to the interfaces of, the Work and Derivative + Works thereof. 
+ + "Contribution" shall mean any work of authorship, including the original + version of the Work and any modifications or additions to that Work or + Derivative Works thereof, that is intentionally submitted to Licensor for + inclusion in the Work by the copyright owner or by an individual or Legal + Entity authorized to submit on behalf of the copyright owner. For the + purposes of this definition, "submitted" means any form of electronic, + verbal, or written communication sent to the Licensor or its + representatives, including but not limited to communication on electronic + mailing lists, source code control systems, and issue tracking systems that + are managed by, or on behalf of, the Licensor for the purpose of discussing + and improving the Work, but excluding communication that is conspicuously + marked or otherwise designated in writing by the copyright owner as "Not a + Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity on + behalf of whom a Contribution has been received by Licensor and subsequently + incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this + License, each Contributor hereby grants to You a perpetual, worldwide, + non-exclusive, no-charge, royalty-free, irrevocable copyright license to + reproduce, prepare Derivative Works of, publicly display, publicly perform, + sublicense, and distribute the Work and such Derivative Works in Source or + Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of this + License, each Contributor hereby grants to You a perpetual, worldwide, + non-exclusive, no-charge, royalty-free, irrevocable (except as stated in + this section) patent license to make, have made, use, offer to sell, sell, + import, and otherwise transfer the Work, where such license applies only to + those patent claims licensable by such Contributor that are necessarily + infringed by their Contribution(s) alone or by combination of their + Contribution(s) with the Work to which such Contribution(s) was submitted. + If You institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work or a + Contribution incorporated within the Work constitutes direct or contributory + patent infringement, then any patent licenses granted to You under this + License for that Work shall terminate as of the date such litigation is + filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or + Derivative Works thereof in any medium, with or without modifications, and + in Source or Object form, provided that You meet the following conditions: + + (a) You must give any other recipients of the Work or Derivative Works a + copy of this License; and + + (b) You must cause any modified files to carry prominent notices stating + that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works that You + distribute, all copyright, patent, trademark, and attribution notices from + the Source form of the Work, excluding those notices that do not pertain to + any part of the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its distribution, + then any Derivative Works that You distribute must include a readable copy + of the attribution notices contained within such NOTICE file, excluding + those notices that do not pertain to any part of the Derivative Works, in at + least one of the 
following places: within a NOTICE text file distributed as + part of the Derivative Works; within the Source form or documentation, if + provided along with the Derivative Works; or, within a display generated by + the Derivative Works, if and wherever such third-party notices normally + appear. The contents of the NOTICE file are for informational purposes only + and do not modify the License. You may add Your own attribution notices + within Derivative Works that You distribute, alongside or as an addendum to + the NOTICE text from the Work, provided that such additional attribution + notices cannot be construed as modifying the License. + + You may add Your own copyright statement to Your modifications and may + provide additional or different license terms and conditions for use, + reproduction, or distribution of Your modifications, or for any such + Derivative Works as a whole, provided Your use, reproduction, and + distribution of the Work otherwise complies with the conditions stated in + this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any + Contribution intentionally submitted for inclusion in the Work by You to the + Licensor shall be under the terms and conditions of this License, without + any additional terms or conditions. Notwithstanding the above, nothing + herein shall supersede or modify the terms of any separate license agreement + you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, + trademarks, service marks, or product names of the Licensor, except as + required for reasonable and customary use in describing the origin of the + Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in + writing, Licensor provides the Work (and each Contributor provides its + Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied, including, without limitation, any + warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or + FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining + the appropriateness of using or redistributing the Work and assume any risks + associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in + tort (including negligence), contract, or otherwise, unless required by + applicable law (such as deliberate and grossly negligent acts) or agreed to + in writing, shall any Contributor be liable to You for damages, including + any direct, indirect, special, incidental, or consequential damages of any + character arising as a result of this License or out of the use or inability + to use the Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all other + commercial damages or losses), even if such Contributor has been advised of + the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or + Derivative Works thereof, You may choose to offer, and charge a fee for, + acceptance of support, warranty, indemnity, or other liability obligations + and/or rights consistent with this License. However, in accepting such + obligations, You may act only on Your own behalf and on Your sole + responsibility, not on behalf of any other Contributor, and only if You + agree to indemnify, defend, and hold each Contributor harmless for any + liability incurred by, or claims asserted against, such Contributor by + reason of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright 2025 Pierre Computer Company + +Licensed under the Apache License, Version 2.0 (the "License"); you may not use +this file except in compliance with the License. You may obtain a copy of the +License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software distributed +under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. diff --git a/packages/git-storage-sdk-go/README.md b/packages/git-storage-sdk-go/README.md new file mode 100644 index 000000000..7b29a2c14 --- /dev/null +++ b/packages/git-storage-sdk-go/README.md @@ -0,0 +1,75 @@ +# pierre-storage-go + +Pierre Git Storage SDK for Go. 
+ +## Usage + +```go +package main + +import ( + "context" + "fmt" + "log" + + storage "pierre.co/pierre/monorepo/packages/git-storage-sdk-go" +) + +func main() { + client, err := storage.NewClient(storage.Options{ + Name: "your-name", + Key: "-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----", + }) + if err != nil { + log.Fatal(err) + } + + repo, err := client.CreateRepo(context.Background(), storage.CreateRepoOptions{}) + if err != nil { + log.Fatal(err) + } + + url, err := repo.RemoteURL(context.Background(), storage.RemoteURLOptions{}) + if err != nil { + log.Fatal(err) + } + +fmt.Println(url) +} +``` + +### Create a commit + +```go +builder, err := repo.CreateCommit(storage.CommitOptions{ + TargetBranch: "main", + CommitMessage: "Update docs", + Author: storage.CommitSignature{Name: "Docs Bot", Email: "docs@example.com"}, +}) +if err != nil { + log.Fatal(err) +} + +builder, err = builder.AddFileFromString("docs/readme.md", "# Updated\n", nil) +if err != nil { + log.Fatal(err) +} + +result, err := builder.Send(context.Background()) +if err != nil { + log.Fatal(err) +} + +fmt.Println(result.CommitSHA) +``` + +TTL fields use `time.Duration` values (for example `time.Hour`). + +## Features + +- Create, list, find, and delete repositories. +- Generate authenticated git remote URLs. +- Read files, list branches/commits, and run grep queries. +- Create commits via streaming commit-pack or diff-commit endpoints. +- Restore commits, manage git notes, and create branches. +- Validate webhook signatures and parse push events. 
diff --git a/packages/git-storage-sdk-go/client.go b/packages/git-storage-sdk-go/client.go new file mode 100644 index 000000000..4df6f1ef6 --- /dev/null +++ b/packages/git-storage-sdk-go/client.go @@ -0,0 +1,355 @@ +package storage + +import ( + "context" + "crypto/ecdsa" + "crypto/x509" + "encoding/pem" + "errors" + "net/url" + "strings" + "time" + + "github.com/golang-jwt/jwt/v5" + "github.com/google/uuid" +) + +const ( + defaultAPIBaseURL = "https://api.{{org}}.code.storage" + defaultStorageBaseURL = "{{org}}.code.storage" + defaultTokenTTL = time.Hour + defaultJWTTTL = 365 * 24 * time.Hour +) + +// NewClient creates a Git storage client. +func NewClient(options Options) (*Client, error) { + if strings.TrimSpace(options.Name) == "" || strings.TrimSpace(options.Key) == "" { + return nil, errors.New("GitStorage requires a name and key. Please check your configuration and try again.") + } + + apiBaseURL := options.APIBaseURL + if apiBaseURL == "" { + apiBaseURL = DefaultAPIBaseURL(options.Name) + } + storageBaseURL := options.StorageBaseURL + if storageBaseURL == "" { + storageBaseURL = DefaultStorageBaseURL(options.Name) + } + version := options.APIVersion + if version == 0 { + version = DefaultAPIVersion + } + + privateKey, err := parseECPrivateKey([]byte(options.Key)) + if err != nil { + return nil, err + } + + client := &Client{ + options: Options{ + Name: options.Name, + Key: options.Key, + APIBaseURL: apiBaseURL, + StorageBaseURL: storageBaseURL, + APIVersion: version, + DefaultTTL: options.DefaultTTL, + HTTPClient: options.HTTPClient, + }, + privateKey: privateKey, + } + client.api = newAPIFetcher(apiBaseURL, version, options.HTTPClient) + return client, nil +} + +// NewGitStorage is an alias for NewClient. +func NewGitStorage(options Options) (*Client, error) { + return NewClient(options) +} + +// NewCodeStorage is an alias for NewClient. 
+func NewCodeStorage(options Options) (*Client, error) { + return NewClient(options) +} + +// DefaultAPIBaseURL builds the default API base URL for an org. +func DefaultAPIBaseURL(name string) string { + return strings.ReplaceAll(defaultAPIBaseURL, "{{org}}", name) +} + +// DefaultStorageBaseURL builds the default storage base URL for an org. +func DefaultStorageBaseURL(name string) string { + return strings.ReplaceAll(defaultStorageBaseURL, "{{org}}", name) +} + +// Config returns the resolved client options. +func (c *Client) Config() Options { + return c.options +} + +// CreateRepo creates a new repository. +func (c *Client) CreateRepo(ctx context.Context, options CreateRepoOptions) (*Repo, error) { + repoID := options.ID + if repoID == "" { + repoID = uuid.NewString() + } + + ttl := resolveInvocationTTL(options.InvocationOptions, defaultTokenTTL) + jwtToken, err := c.generateJWT(repoID, RemoteURLOptions{Permissions: []Permission{PermissionRepoWrite}, TTL: ttl}) + if err != nil { + return nil, err + } + + var baseRepo *baseRepoPayload + isFork := false + resolvedDefaultBranch := "" + + if options.BaseRepo != nil { + switch base := options.BaseRepo.(type) { + case ForkBaseRepo: + isFork = true + baseRepoToken, err := c.generateJWT(base.ID, RemoteURLOptions{Permissions: []Permission{PermissionGitRead}, TTL: ttl}) + if err != nil { + return nil, err + } + baseRepo = &baseRepoPayload{ + Provider: "code", + Owner: c.options.Name, + Name: base.ID, + Operation: "fork", + Auth: &authPayload{Token: baseRepoToken}, + } + if strings.TrimSpace(base.Ref) != "" { + baseRepo.Ref = base.Ref + } + if strings.TrimSpace(base.SHA) != "" { + baseRepo.SHA = base.SHA + } + if strings.TrimSpace(options.DefaultBranch) != "" { + resolvedDefaultBranch = options.DefaultBranch + } + case GitHubBaseRepo: + provider := base.Provider + if provider == "" { + provider = RepoProviderGitHub + } + baseRepo = &baseRepoPayload{ + Provider: string(provider), + Owner: base.Owner, + Name: base.Name, + } 
+ if strings.TrimSpace(base.DefaultBranch) != "" { + baseRepo.DefaultBranch = base.DefaultBranch + resolvedDefaultBranch = base.DefaultBranch + } + default: + return nil, errors.New("unsupported base repo type") + } + } + + if resolvedDefaultBranch == "" { + if strings.TrimSpace(options.DefaultBranch) != "" { + resolvedDefaultBranch = options.DefaultBranch + } else if !isFork { + resolvedDefaultBranch = "main" + } + } + + var body interface{} + if baseRepo != nil || resolvedDefaultBranch != "" { + body = &createRepoRequest{ + BaseRepo: baseRepo, + DefaultBranch: resolvedDefaultBranch, + } + } + + resp, err := c.api.post(ctx, "repos", nil, body, jwtToken, &requestOptions{allowedStatus: map[int]bool{409: true}}) + if err != nil { + return nil, err + } + defer resp.Body.Close() + if resp.StatusCode == 409 { + return nil, errors.New("Repository already exists") + } + + if resolvedDefaultBranch == "" { + resolvedDefaultBranch = "main" + } + return &Repo{ID: repoID, DefaultBranch: resolvedDefaultBranch, client: c}, nil +} + +// ListRepos lists repositories for the org. 
+func (c *Client) ListRepos(ctx context.Context, options ListReposOptions) (ListReposResult, error) { + ttl := resolveInvocationTTL(options.InvocationOptions, defaultTokenTTL) + jwtToken, err := c.generateJWT("org", RemoteURLOptions{Permissions: []Permission{PermissionOrgRead}, TTL: ttl}) + if err != nil { + return ListReposResult{}, err + } + + params := url.Values{} + if options.Cursor != "" { + params.Set("cursor", options.Cursor) + } + if options.Limit > 0 { + params.Set("limit", itoa(options.Limit)) + } + if len(params) == 0 { + params = nil + } + + resp, err := c.api.get(ctx, "repos", params, jwtToken, nil) + if err != nil { + return ListReposResult{}, err + } + defer resp.Body.Close() + + var payload listReposResponse + if err := decodeJSON(resp, &payload); err != nil { + return ListReposResult{}, err + } + + result := ListReposResult{HasMore: payload.HasMore} + if payload.NextCursor != "" { + result.NextCursor = payload.NextCursor + } + for _, repo := range payload.Repos { + entry := RepoInfo{ + RepoID: repo.RepoID, + URL: repo.URL, + DefaultBranch: repo.DefaultBranch, + CreatedAt: repo.CreatedAt, + } + if repo.BaseRepo != nil { + entry.BaseRepo = &RepoBaseInfo{ + Provider: repo.BaseRepo.Provider, + Owner: repo.BaseRepo.Owner, + Name: repo.BaseRepo.Name, + } + } + result.Repos = append(result.Repos, entry) + } + + return result, nil +} + +// FindOne retrieves a repo by ID. 
+func (c *Client) FindOne(ctx context.Context, options FindOneOptions) (*Repo, error) { + if strings.TrimSpace(options.ID) == "" { + return nil, errors.New("findOne id is required") + } + jwtToken, err := c.generateJWT(options.ID, RemoteURLOptions{Permissions: []Permission{PermissionGitRead}, TTL: defaultTokenTTL}) + if err != nil { + return nil, err + } + + resp, err := c.api.get(ctx, "repo", nil, jwtToken, &requestOptions{allowedStatus: map[int]bool{404: true}}) + if err != nil { + return nil, err + } + defer resp.Body.Close() + if resp.StatusCode == 404 { + return nil, nil + } + + var payload struct { + DefaultBranch string `json:"default_branch"` + } + if err := decodeJSON(resp, &payload); err != nil { + return nil, err + } + defaultBranch := payload.DefaultBranch + if defaultBranch == "" { + defaultBranch = "main" + } + return &Repo{ID: options.ID, DefaultBranch: defaultBranch, client: c}, nil +} + +// DeleteRepo deletes a repository by ID. +func (c *Client) DeleteRepo(ctx context.Context, options DeleteRepoOptions) (DeleteRepoResult, error) { + if strings.TrimSpace(options.ID) == "" { + return DeleteRepoResult{}, errors.New("deleteRepo id is required") + } + ttl := resolveInvocationTTL(options.InvocationOptions, defaultTokenTTL) + jwtToken, err := c.generateJWT(options.ID, RemoteURLOptions{Permissions: []Permission{PermissionRepoWrite}, TTL: ttl}) + if err != nil { + return DeleteRepoResult{}, err + } + + resp, err := c.api.delete(ctx, "repos/delete", nil, nil, jwtToken, &requestOptions{allowedStatus: map[int]bool{404: true, 409: true}}) + if err != nil { + return DeleteRepoResult{}, err + } + defer resp.Body.Close() + + if resp.StatusCode == 404 { + return DeleteRepoResult{}, errors.New("Repository not found") + } + if resp.StatusCode == 409 { + return DeleteRepoResult{}, errors.New("Repository already deleted") + } + + var payload struct { + RepoID string `json:"repo_id"` + Message string `json:"message"` + } + if err := decodeJSON(resp, &payload); err != 
nil { + return DeleteRepoResult{}, err + } + + return DeleteRepoResult{RepoID: payload.RepoID, Message: payload.Message}, nil +} + +func (c *Client) generateJWT(repoID string, options RemoteURLOptions) (string, error) { + permissions := options.Permissions + if len(permissions) == 0 { + permissions = []Permission{PermissionGitWrite, PermissionGitRead} + } + + ttl := options.TTL + if ttl <= 0 { + if c.options.DefaultTTL > 0 { + ttl = c.options.DefaultTTL + } else { + ttl = defaultJWTTTL + } + } + + issuedAt := time.Now() + claims := jwt.MapClaims{ + "iss": c.options.Name, + "sub": "@pierre/storage", + "repo": repoID, + "scopes": permissions, + "iat": issuedAt.Unix(), + "exp": issuedAt.Add(ttl).Unix(), + } + + token := jwt.NewWithClaims(jwt.SigningMethodES256, claims) + return token.SignedString(c.privateKey) +} + +func parseECPrivateKey(pemBytes []byte) (*ecdsa.PrivateKey, error) { + block, _ := pem.Decode(pemBytes) + if block == nil { + return nil, errors.New("failed to parse private key PEM") + } + + if key, err := x509.ParsePKCS8PrivateKey(block.Bytes); err == nil { + if ecKey, ok := key.(*ecdsa.PrivateKey); ok { + return ecKey, nil + } + return nil, errors.New("private key is not ECDSA") + } + + if ecKey, err := x509.ParseECPrivateKey(block.Bytes); err == nil { + return ecKey, nil + } + + return nil, errors.New("unsupported private key format") +} + +func resolveInvocationTTL(options InvocationOptions, defaultTTL time.Duration) time.Duration { + if options.TTL > 0 { + return options.TTL + } + return defaultTTL +} diff --git a/packages/git-storage-sdk-go/client_test.go b/packages/git-storage-sdk-go/client_test.go new file mode 100644 index 000000000..ea5913cf9 --- /dev/null +++ b/packages/git-storage-sdk-go/client_test.go @@ -0,0 +1,170 @@ +package storage + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "strings" + "testing" + "time" +) + +func TestNewClientValidation(t *testing.T) { + _, err := NewClient(Options{}) + if err == nil || 
!strings.Contains(err.Error(), "requires a name and key") { + t.Fatalf("expected validation error, got %v", err) + } + _, err = NewClient(Options{Name: "", Key: "test"}) + if err == nil { + t.Fatalf("expected error for empty name") + } + _, err = NewClient(Options{Name: "test", Key: ""}) + if err == nil { + t.Fatalf("expected error for empty key") + } +} + +func TestDefaultBaseURLs(t *testing.T) { + api := DefaultAPIBaseURL("acme") + if api != "https://api.acme.code.storage" { + t.Fatalf("unexpected api url: %s", api) + } + storage := DefaultStorageBaseURL("acme") + if storage != "acme.code.storage" { + t.Fatalf("unexpected storage url: %s", storage) + } +} + +func TestCreateRepoDefaultBranch(t *testing.T) { + var receivedBody map[string]interface{} + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/api/v1/repos" { + t.Fatalf("unexpected path: %s", r.URL.Path) + } + decoder := json.NewDecoder(r.Body) + _ = decoder.Decode(&receivedBody) + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"repo_id":"repo","url":"https://repo.git"}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + + _, err = client.CreateRepo(nil, CreateRepoOptions{}) + if err != nil { + t.Fatalf("create repo error: %v", err) + } + + if receivedBody["default_branch"] != "main" { + t.Fatalf("expected default_branch main, got %#v", receivedBody["default_branch"]) + } +} + +func TestCreateRepoForkBaseRepo(t *testing.T) { + var receivedBody map[string]interface{} + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + decoder := json.NewDecoder(r.Body) + _ = decoder.Decode(&receivedBody) + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"repo_id":"repo","url":"https://repo.git"}`)) + })) + defer server.Close() + + 
client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + + _, err = client.CreateRepo(nil, CreateRepoOptions{ + BaseRepo: ForkBaseRepo{ID: "template", Ref: "main"}, + }) + if err != nil { + t.Fatalf("create repo error: %v", err) + } + + baseRepo, ok := receivedBody["base_repo"].(map[string]interface{}) + if !ok { + t.Fatalf("expected base_repo payload") + } + if baseRepo["provider"] != "code" { + t.Fatalf("expected provider code") + } + if baseRepo["name"] != "template" { + t.Fatalf("expected name template") + } + auth, ok := baseRepo["auth"].(map[string]interface{}) + if !ok || auth["token"] == "" { + t.Fatalf("expected auth token") + } +} + +func TestListReposScopes(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + token := strings.TrimPrefix(r.Header.Get("Authorization"), "Bearer ") + claims := parseJWTFromToken(t, token) + if claims["repo"] != "org" { + t.Fatalf("expected repo org") + } + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"repos":[],"has_more":false}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + + _, err = client.ListRepos(nil, ListReposOptions{}) + if err != nil { + t.Fatalf("list repos error: %v", err) + } +} + +func TestDeleteRepoTTL(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + token := strings.TrimPrefix(r.Header.Get("Authorization"), "Bearer ") + claims := parseJWTFromToken(t, token) + exp := int64(claims["exp"].(float64)) + iat := int64(claims["iat"].(float64)) + if exp-iat != 300 { + t.Fatalf("expected ttl 300, got %d", exp-iat) + } + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"repo_id":"repo","message":"ok"}`)) + })) + defer 
server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + + _, err = client.DeleteRepo(nil, DeleteRepoOptions{ID: "repo", InvocationOptions: InvocationOptions{TTL: 300 * time.Second}}) + if err != nil { + t.Fatalf("delete repo error: %v", err) + } +} + +func TestConfigAliases(t *testing.T) { + client, err := NewCodeStorage(Options{Name: "acme", Key: testKey}) + if err != nil { + t.Fatalf("client error: %v", err) + } + cfg := client.Config() + if cfg.Name != "acme" { + t.Fatalf("unexpected config") + } +} + +func TestAliasConstructors(t *testing.T) { + if _, err := NewGitStorage(Options{Name: "acme", Key: testKey}); err != nil { + t.Fatalf("NewGitStorage error: %v", err) + } + if _, err := NewCodeStorage(Options{Name: "acme", Key: testKey}); err != nil { + t.Fatalf("NewCodeStorage error: %v", err) + } +} diff --git a/packages/git-storage-sdk-go/commit.go b/packages/git-storage-sdk-go/commit.go new file mode 100644 index 000000000..85e0ac4da --- /dev/null +++ b/packages/git-storage-sdk-go/commit.go @@ -0,0 +1,384 @@ +package storage + +import ( + "bytes" + "context" + "encoding/base64" + "encoding/json" + "errors" + "io" + "net/http" + "strings" + "time" + + "github.com/google/uuid" +) + +const maxChunkBytes = 4 * 1024 * 1024 + +type commitOperation struct { + Path string + ContentID string + Mode GitFileMode + Operation string + Source io.Reader +} + +func (b *CommitBuilder) normalize() error { + b.options.TargetBranch = strings.TrimSpace(b.options.TargetBranch) + b.options.TargetRef = strings.TrimSpace(b.options.TargetRef) + b.options.CommitMessage = strings.TrimSpace(b.options.CommitMessage) + + if b.options.TargetBranch != "" { + branch, err := normalizeBranchName(b.options.TargetBranch) + if err != nil { + return err + } + b.options.TargetBranch = branch + } else if b.options.TargetRef != "" { + branch, err := normalizeLegacyTargetRef(b.options.TargetRef) + 
if err != nil { + return err + } + b.options.TargetBranch = branch + } else { + return errors.New("createCommit targetBranch is required") + } + + if b.options.CommitMessage == "" { + return errors.New("createCommit commitMessage is required") + } + + if strings.TrimSpace(b.options.Author.Name) == "" || strings.TrimSpace(b.options.Author.Email) == "" { + return errors.New("createCommit author name and email are required") + } + b.options.Author.Name = strings.TrimSpace(b.options.Author.Name) + b.options.Author.Email = strings.TrimSpace(b.options.Author.Email) + + b.options.ExpectedHeadSHA = strings.TrimSpace(b.options.ExpectedHeadSHA) + b.options.BaseBranch = strings.TrimSpace(b.options.BaseBranch) + if b.options.BaseBranch != "" && strings.HasPrefix(b.options.BaseBranch, "refs/") { + return errors.New("createCommit baseBranch must not include refs/ prefix") + } + + if b.options.EphemeralBase && b.options.BaseBranch == "" { + return errors.New("createCommit ephemeralBase requires baseBranch") + } + + if b.options.Committer != nil { + if strings.TrimSpace(b.options.Committer.Name) == "" || strings.TrimSpace(b.options.Committer.Email) == "" { + return errors.New("createCommit committer name and email are required when provided") + } + b.options.Committer.Name = strings.TrimSpace(b.options.Committer.Name) + b.options.Committer.Email = strings.TrimSpace(b.options.Committer.Email) + } + + return nil +} + +// AddFile adds a file to the commit. 
+func (b *CommitBuilder) AddFile(path string, source interface{}, options *CommitFileOptions) (*CommitBuilder, error) { + if err := b.ensureNotSent(); err != nil { + return nil, err + } + normalizedPath, err := normalizePath(path) + if err != nil { + return nil, err + } + + reader, err := toReader(source) + if err != nil { + return nil, err + } + + mode := GitFileModeRegular + if options != nil && options.Mode != "" { + mode = options.Mode + } + + b.ops = append(b.ops, commitOperation{ + Path: normalizedPath, + ContentID: uuid.NewString(), + Mode: mode, + Operation: "upsert", + Source: reader, + }) + return b, nil +} + +// AddFileFromString adds a text file. +func (b *CommitBuilder) AddFileFromString(path string, contents string, options *CommitTextFileOptions) (*CommitBuilder, error) { + encoding := "utf-8" + if options != nil && options.Encoding != "" { + encoding = options.Encoding + } + encoding = strings.ToLower(strings.TrimSpace(encoding)) + if encoding != "utf8" && encoding != "utf-8" { + return nil, errors.New("unsupported encoding: " + encoding) + } + if options == nil { + return b.AddFile(path, []byte(contents), nil) + } + return b.AddFile(path, []byte(contents), &options.CommitFileOptions) +} + +// DeletePath removes a file or directory. +func (b *CommitBuilder) DeletePath(path string) (*CommitBuilder, error) { + if err := b.ensureNotSent(); err != nil { + return nil, err + } + normalizedPath, err := normalizePath(path) + if err != nil { + return nil, err + } + b.ops = append(b.ops, commitOperation{ + Path: normalizedPath, + ContentID: uuid.NewString(), + Operation: "delete", + }) + return b, nil +} + +// Send finalizes the commit. 
+func (b *CommitBuilder) Send(ctx context.Context) (CommitResult, error) { + if err := b.ensureNotSent(); err != nil { + return CommitResult{}, err + } + b.sent = true + + if strings.TrimSpace(b.repoID) == "" { + return CommitResult{}, errors.New("createCommit repository id is required") + } + if b.client == nil { + return CommitResult{}, errors.New("createCommit client is required") + } + + ttl := resolveCommitTTL(b.options.InvocationOptions, defaultTokenTTL) + jwtToken, err := b.client.generateJWT(b.repoID, RemoteURLOptions{Permissions: []Permission{PermissionGitWrite}, TTL: ttl}) + if err != nil { + return CommitResult{}, err + } + + metadata := buildCommitMetadata(b.options, b.ops) + + pipeReader, pipeWriter := io.Pipe() + encoder := json.NewEncoder(pipeWriter) + encoder.SetEscapeHTML(false) + + go func() { + defer pipeWriter.Close() + if err := encoder.Encode(metadataEnvelope{Metadata: metadata}); err != nil { + _ = pipeWriter.CloseWithError(err) + return + } + + for _, op := range b.ops { + if op.Operation != "upsert" { + continue + } + if err := writeBlobChunks(encoder, op.ContentID, op.Source); err != nil { + _ = pipeWriter.CloseWithError(err) + return + } + } + }() + + url := b.client.api.basePath() + "/repos/commit-pack" + resp, err := doStreamingRequest(ctx, b.client.api.httpClient, http.MethodPost, url, jwtToken, pipeReader) + if err != nil { + return CommitResult{}, err + } + defer resp.Body.Close() + + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + fallback := "createCommit request failed (" + itoa(resp.StatusCode) + " " + resp.Status + ")" + statusMessage, statusLabel, refUpdate, err := parseCommitPackError(resp, fallback) + if err != nil { + return CommitResult{}, err + } + return CommitResult{}, newRefUpdateError(statusMessage, statusLabel, refUpdate) + } + + var ack commitPackAck + if err := decodeJSON(resp, &ack); err != nil { + return CommitResult{}, err + } + + return buildCommitResult(ack) +} + +func (b *CommitBuilder) ensureNotSent() 
error { + if b.sent { + return errors.New("createCommit builder cannot be reused after send()") + } + return nil +} + +func buildCommitMetadata(options CommitOptions, ops []commitOperation) *commitMetadataPayload { + files := make([]fileEntryPayload, 0, len(ops)) + for _, op := range ops { + entry := fileEntryPayload{ + Path: op.Path, + ContentID: op.ContentID, + Operation: op.Operation, + } + if op.Operation == "upsert" && op.Mode != "" { + entry.Mode = string(op.Mode) + } + files = append(files, entry) + } + + metadata := &commitMetadataPayload{ + TargetBranch: options.TargetBranch, + CommitMessage: options.CommitMessage, + Author: authorInfo{ + Name: options.Author.Name, + Email: options.Author.Email, + }, + Files: files, + } + + if options.ExpectedHeadSHA != "" { + metadata.ExpectedHeadSHA = options.ExpectedHeadSHA + } + if options.BaseBranch != "" { + metadata.BaseBranch = options.BaseBranch + } + if options.Committer != nil { + metadata.Committer = &authorInfo{ + Name: options.Committer.Name, + Email: options.Committer.Email, + } + } + if options.Ephemeral { + metadata.Ephemeral = true + } + if options.EphemeralBase { + metadata.EphemeralBase = true + } + + return metadata +} + +func writeBlobChunks(encoder *json.Encoder, contentID string, reader io.Reader) error { + buf := make([]byte, maxChunkBytes) + emitted := false + for { + n, err := reader.Read(buf) + if n > 0 { + payload := blobChunkEnvelope{ + BlobChunk: blobChunkPayload{ + ContentID: contentID, + Data: base64.StdEncoding.EncodeToString(buf[:n]), + EOF: err == io.EOF, + }, + } + emitted = true + if err := encoder.Encode(payload); err != nil { + return err + } + if err == io.EOF { + return nil + } + } + if err == io.EOF { + if !emitted { + payload := blobChunkEnvelope{ + BlobChunk: blobChunkPayload{ + ContentID: contentID, + Data: "", + EOF: true, + }, + } + return encoder.Encode(payload) + } + return nil + } + if err != nil { + return err + } + } +} + +func normalizePath(path string) (string, error) 
{ + path = strings.TrimSpace(path) + if path == "" { + return "", errors.New("File path must be a non-empty string") + } + return strings.TrimPrefix(path, "/"), nil +} + +func normalizeBranchName(value string) (string, error) { + trimmed := strings.TrimSpace(value) + if trimmed == "" { + return "", errors.New("createCommit targetBranch is required") + } + if strings.HasPrefix(trimmed, "refs/heads/") { + branch := strings.TrimSpace(strings.TrimPrefix(trimmed, "refs/heads/")) + if branch == "" { + return "", errors.New("createCommit targetBranch is required") + } + return branch, nil + } + if strings.HasPrefix(trimmed, "refs/") { + return "", errors.New("createCommit targetBranch must not include refs/ prefix") + } + return trimmed, nil +} + +func normalizeLegacyTargetRef(ref string) (string, error) { + trimmed := strings.TrimSpace(ref) + if trimmed == "" { + return "", errors.New("createCommit targetRef is required") + } + if !strings.HasPrefix(trimmed, "refs/heads/") { + return "", errors.New("createCommit targetRef must start with refs/heads/") + } + branch := strings.TrimSpace(strings.TrimPrefix(trimmed, "refs/heads/")) + if branch == "" { + return "", errors.New("createCommit targetRef must include a branch name") + } + return branch, nil +} + +func resolveCommitTTL(options InvocationOptions, defaultValue time.Duration) time.Duration { + if options.TTL > 0 { + return options.TTL + } + return defaultValue +} + +func doStreamingRequest(ctx context.Context, client *http.Client, method string, url string, jwtToken string, body io.Reader) (*http.Response, error) { + if ctx == nil { + ctx = context.Background() + } + if client == nil { + client = http.DefaultClient + } + + req, err := http.NewRequestWithContext(ctx, method, url, body) + if err != nil { + return nil, err + } + req.Header.Set("Authorization", "Bearer "+jwtToken) + req.Header.Set("Content-Type", "application/x-ndjson") + req.Header.Set("Accept", "application/json") + req.Header.Set("Code-Storage-Agent", 
userAgent()) + + return client.Do(req) +} + +func toReader(source interface{}) (io.Reader, error) { + switch value := source.(type) { + case nil: + return nil, errors.New("unsupported content source; expected binary data") + case []byte: + return bytes.NewReader(value), nil + case string: + return strings.NewReader(value), nil + case io.Reader: + return value, nil + default: + return nil, errors.New("unsupported content source; expected binary data") + } +} diff --git a/packages/git-storage-sdk-go/commit_pack.go b/packages/git-storage-sdk-go/commit_pack.go new file mode 100644 index 000000000..6650cfab8 --- /dev/null +++ b/packages/git-storage-sdk-go/commit_pack.go @@ -0,0 +1,146 @@ +package storage + +import ( + "encoding/json" + "errors" + "io" + "net/http" + "strconv" + "strings" +) + +type commitPackAck struct { + Commit struct { + CommitSHA string `json:"commit_sha"` + TreeSHA string `json:"tree_sha"` + TargetBranch string `json:"target_branch"` + PackBytes int `json:"pack_bytes"` + BlobCount int `json:"blob_count"` + } `json:"commit"` + Result struct { + Branch string `json:"branch"` + OldSHA string `json:"old_sha"` + NewSHA string `json:"new_sha"` + Success bool `json:"success"` + Status string `json:"status"` + Message string `json:"message,omitempty"` + } `json:"result"` +} + +type commitPackResponse struct { + Commit *struct { + CommitSHA string `json:"commit_sha"` + TreeSHA string `json:"tree_sha"` + TargetBranch string `json:"target_branch"` + PackBytes int `json:"pack_bytes"` + BlobCount int `json:"blob_count"` + } `json:"commit,omitempty"` + Result struct { + Branch string `json:"branch"` + OldSHA string `json:"old_sha"` + NewSHA string `json:"new_sha"` + Success *bool `json:"success"` + Status string `json:"status"` + Message string `json:"message"` + } `json:"result"` +} + +type errorEnvelope struct { + Error string `json:"error"` +} + +func buildCommitResult(ack commitPackAck) (CommitResult, error) { + refUpdate := RefUpdate{ + Branch: 
ack.Result.Branch, + OldSHA: ack.Result.OldSHA, + NewSHA: ack.Result.NewSHA, + } + + if !ack.Result.Success { + message := ack.Result.Message + if strings.TrimSpace(message) == "" { + message = "Commit failed with status " + ack.Result.Status + } + return CommitResult{}, newRefUpdateError(message, ack.Result.Status, &refUpdate) + } + + return CommitResult{ + CommitSHA: ack.Commit.CommitSHA, + TreeSHA: ack.Commit.TreeSHA, + TargetBranch: ack.Commit.TargetBranch, + PackBytes: ack.Commit.PackBytes, + BlobCount: ack.Commit.BlobCount, + RefUpdate: refUpdate, + }, nil +} + +func parseCommitPackError(resp *http.Response, fallbackMessage string) (string, string, *RefUpdate, error) { + body, err := readAll(resp) + if err != nil { + return "", "", nil, err + } + + statusLabel := defaultStatusLabel(resp.StatusCode) + var refUpdate *RefUpdate + message := "" + + var parsed commitPackResponse + if err := json.Unmarshal(body, &parsed); err == nil { + if strings.TrimSpace(parsed.Result.Status) != "" { + statusLabel = strings.TrimSpace(parsed.Result.Status) + } + if parsed.Result.Message != "" { + message = strings.TrimSpace(parsed.Result.Message) + } + refUpdate = partialRefUpdate(parsed.Result.Branch, parsed.Result.OldSHA, parsed.Result.NewSHA) + } + + if message == "" { + var errEnv errorEnvelope + if err := json.Unmarshal(body, &errEnv); err == nil { + if strings.TrimSpace(errEnv.Error) != "" { + message = strings.TrimSpace(errEnv.Error) + } + } + } + + if message == "" && len(body) > 0 { + message = strings.TrimSpace(string(body)) + } + + if message == "" { + if fallbackMessage != "" { + message = fallbackMessage + } else { + message = "commit request failed (" + strconv.Itoa(resp.StatusCode) + " " + resp.Status + ")" + } + } + + return message, statusLabel, refUpdate, nil +} + +func defaultStatusLabel(statusCode int) string { + status := inferRefUpdateReason(strconv.Itoa(statusCode)) + if status == RefUpdateReasonUnknown { + return string(RefUpdateReasonFailed) + } + return 
string(status) +} + +func partialRefUpdate(branch string, oldSHA string, newSHA string) *RefUpdate { + branch = strings.TrimSpace(branch) + oldSHA = strings.TrimSpace(oldSHA) + newSHA = strings.TrimSpace(newSHA) + + if branch == "" && oldSHA == "" && newSHA == "" { + return nil + } + return &RefUpdate{Branch: branch, OldSHA: oldSHA, NewSHA: newSHA} +} + +func readAll(resp *http.Response) ([]byte, error) { + if resp.Body == nil { + return nil, errors.New("response body is empty") + } + return io.ReadAll(resp.Body) +} diff --git a/packages/git-storage-sdk-go/commit_test.go b/packages/git-storage-sdk-go/commit_test.go new file mode 100644 index 000000000..fcaea3b66 --- /dev/null +++ b/packages/git-storage-sdk-go/commit_test.go @@ -0,0 +1,104 @@ +package storage + +import ( + "bufio" + "encoding/json" + "net/http" + "net/http/httptest" + "strings" + "testing" +) + +func TestCommitPackRequest(t *testing.T) { + var requestPath string + var headerAgent string + var lines []string + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + requestPath = r.URL.Path + headerAgent = r.Header.Get("Code-Storage-Agent") + scanner := bufio.NewScanner(r.Body) + for scanner.Scan() { + lines = append(lines, scanner.Text()) + } + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"commit":{"commit_sha":"abc","tree_sha":"def","target_branch":"main","pack_bytes":10,"blob_count":1},"result":{"branch":"main","old_sha":"old","new_sha":"new","success":true,"status":"ok"}}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + builder, err := repo.CreateCommit(CommitOptions{ + TargetBranch: "main", + CommitMessage: "test", + Author: CommitSignature{Name: "Tester", Email: "test@example.com"}, + }) + if err != nil { + t.Fatalf("builder 
error: %v", err) + } + + builder, err = builder.AddFileFromString("README.md", "hello", nil) + if err != nil { + t.Fatalf("add file error: %v", err) + } + + _, err = builder.Send(nil) + if err != nil { + t.Fatalf("send error: %v", err) + } + + if requestPath != "/api/v1/repos/commit-pack" { + t.Fatalf("unexpected path: %s", requestPath) + } + if headerAgent == "" || !strings.Contains(headerAgent, "code-storage-go-sdk/") { + t.Fatalf("missing Code-Storage-Agent header") + } + if len(lines) < 1 { + t.Fatalf("expected ndjson lines") + } + + var first map[string]interface{} + if err := json.Unmarshal([]byte(lines[0]), &first); err != nil { + t.Fatalf("decode first line: %v", err) + } + metadata, ok := first["metadata"].(map[string]interface{}) + if !ok { + t.Fatalf("missing metadata") + } + if metadata["target_branch"] != "main" { + t.Fatalf("unexpected metadata target_branch") + } +} + +func TestCommitFromDiffRequest(t *testing.T) { + var requestPath string + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + requestPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"commit":{"commit_sha":"abc","tree_sha":"def","target_branch":"main","pack_bytes":10,"blob_count":1},"result":{"branch":"main","old_sha":"old","new_sha":"new","success":true,"status":"ok"}}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + _, err = repo.CreateCommitFromDiff(nil, CommitFromDiffOptions{ + TargetBranch: "main", + CommitMessage: "test", + Author: CommitSignature{Name: "Tester", Email: "test@example.com"}, + Diff: "diff content", + }) + if err != nil { + t.Fatalf("commit from diff error: %v", err) + } + if requestPath != "/api/v1/repos/diff-commit" { + t.Fatalf("unexpected path: %s", requestPath) + } +} diff 
--git a/packages/git-storage-sdk-go/diff_commit.go b/packages/git-storage-sdk-go/diff_commit.go new file mode 100644 index 000000000..b99a87663 --- /dev/null +++ b/packages/git-storage-sdk-go/diff_commit.go @@ -0,0 +1,208 @@ +package storage + +import ( + "context" + "encoding/base64" + "encoding/json" + "errors" + "io" + "net/http" + "strings" +) + +type diffCommitExecutor struct { + options CommitFromDiffOptions + client *Client +} + +func (d *diffCommitExecutor) normalize() (CommitFromDiffOptions, error) { + options := d.options + options.TargetBranch = strings.TrimSpace(options.TargetBranch) + options.CommitMessage = strings.TrimSpace(options.CommitMessage) + options.ExpectedHeadSHA = strings.TrimSpace(options.ExpectedHeadSHA) + options.BaseBranch = strings.TrimSpace(options.BaseBranch) + + if options.Diff == nil { + return options, errors.New("createCommitFromDiff diff is required") + } + + branch, err := normalizeDiffBranchName(options.TargetBranch) + if err != nil { + return options, err + } + options.TargetBranch = branch + + if options.CommitMessage == "" { + return options, errors.New("createCommitFromDiff commitMessage is required") + } + + if strings.TrimSpace(options.Author.Name) == "" || strings.TrimSpace(options.Author.Email) == "" { + return options, errors.New("createCommitFromDiff author name and email are required") + } + options.Author.Name = strings.TrimSpace(options.Author.Name) + options.Author.Email = strings.TrimSpace(options.Author.Email) + + if options.BaseBranch != "" && strings.HasPrefix(options.BaseBranch, "refs/") { + return options, errors.New("createCommitFromDiff baseBranch must not include refs/ prefix") + } + if options.EphemeralBase && options.BaseBranch == "" { + return options, errors.New("createCommitFromDiff ephemeralBase requires baseBranch") + } + + if options.Committer != nil { + if strings.TrimSpace(options.Committer.Name) == "" || strings.TrimSpace(options.Committer.Email) == "" { + return options, 
errors.New("createCommitFromDiff committer name and email are required when provided") + } + options.Committer.Name = strings.TrimSpace(options.Committer.Name) + options.Committer.Email = strings.TrimSpace(options.Committer.Email) + } + + return options, nil +} + +func (d *diffCommitExecutor) send(ctx context.Context, repoID string) (CommitResult, error) { + options, err := d.normalize() + if err != nil { + return CommitResult{}, err + } + + ttl := resolveCommitTTL(options.InvocationOptions, defaultTokenTTL) + jwtToken, err := d.client.generateJWT(repoID, RemoteURLOptions{Permissions: []Permission{PermissionGitWrite}, TTL: ttl}) + if err != nil { + return CommitResult{}, err + } + + diffReader, err := toReader(options.Diff) + if err != nil { + return CommitResult{}, err + } + + metadata := buildDiffCommitMetadata(options) + + pipeReader, pipeWriter := io.Pipe() + encoder := json.NewEncoder(pipeWriter) + encoder.SetEscapeHTML(false) + + go func() { + defer pipeWriter.Close() + if err := encoder.Encode(metadataEnvelope{Metadata: metadata}); err != nil { + _ = pipeWriter.CloseWithError(err) + return + } + if err := writeDiffChunks(encoder, diffReader); err != nil { + _ = pipeWriter.CloseWithError(err) + return + } + }() + + url := d.client.api.basePath() + "/repos/diff-commit" + resp, err := doStreamingRequest(ctx, d.client.api.httpClient, http.MethodPost, url, jwtToken, pipeReader) + if err != nil { + return CommitResult{}, err + } + defer resp.Body.Close() + + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + fallback := "createCommitFromDiff request failed (" + itoa(resp.StatusCode) + " " + resp.Status + ")" + statusMessage, statusLabel, refUpdate, err := parseCommitPackError(resp, fallback) + if err != nil { + return CommitResult{}, err + } + return CommitResult{}, newRefUpdateError(statusMessage, statusLabel, refUpdate) + } + + var ack commitPackAck + if err := decodeJSON(resp, &ack); err != nil { + return CommitResult{}, err + } + + return 
buildCommitResult(ack) +} + +func buildDiffCommitMetadata(options CommitFromDiffOptions) *commitMetadataPayload { + metadata := &commitMetadataPayload{ + TargetBranch: options.TargetBranch, + CommitMessage: options.CommitMessage, + Author: authorInfo{ + Name: options.Author.Name, + Email: options.Author.Email, + }, + } + + if options.ExpectedHeadSHA != "" { + metadata.ExpectedHeadSHA = options.ExpectedHeadSHA + } + if options.BaseBranch != "" { + metadata.BaseBranch = options.BaseBranch + } + if options.Ephemeral { + metadata.Ephemeral = true + } + if options.EphemeralBase { + metadata.EphemeralBase = true + } + if options.Committer != nil { + metadata.Committer = &authorInfo{ + Name: options.Committer.Name, + Email: options.Committer.Email, + } + } + + return metadata +} + +func writeDiffChunks(encoder *json.Encoder, reader io.Reader) error { + buf := make([]byte, maxChunkBytes) + emitted := false + for { + n, err := reader.Read(buf) + if n > 0 { + payload := diffChunkEnvelope{ + DiffChunk: diffChunkPayload{ + Data: base64.StdEncoding.EncodeToString(buf[:n]), + EOF: err == io.EOF, + }, + } + emitted = true + if err := encoder.Encode(payload); err != nil { + return err + } + if err == io.EOF { + return nil + } + } + if err == io.EOF { + if !emitted { + payload := diffChunkEnvelope{ + DiffChunk: diffChunkPayload{ + Data: "", + EOF: true, + }, + } + return encoder.Encode(payload) + } + return nil + } + if err != nil { + return err + } + } +} + +func normalizeDiffBranchName(value string) (string, error) { + trimmed := strings.TrimSpace(value) + if trimmed == "" { + return "", errors.New("createCommitFromDiff targetBranch is required") + } + if strings.HasPrefix(trimmed, "refs/heads/") { + branch := strings.TrimSpace(strings.TrimPrefix(trimmed, "refs/heads/")) + if branch == "" { + return "", errors.New("createCommitFromDiff targetBranch must include a branch name") + } + return branch, nil + } + if strings.HasPrefix(trimmed, "refs/") { + return "", 
errors.New("createCommitFromDiff targetBranch must not include refs/ prefix") + } + return trimmed, nil +} diff --git a/packages/git-storage-sdk-go/errors.go b/packages/git-storage-sdk-go/errors.go new file mode 100644 index 000000000..9e693f1fd --- /dev/null +++ b/packages/git-storage-sdk-go/errors.go @@ -0,0 +1,90 @@ +package storage + +import ( + "strings" +) + +// APIError describes HTTP errors for non-commit endpoints. +type APIError struct { + Message string + Status int + StatusText string + Method string + URL string + Body interface{} +} + +func (e *APIError) Error() string { + return e.Message +} + +// RefUpdateReason describes a ref update failure reason. +type RefUpdateReason string + +const ( + RefUpdateReasonPreconditionFailed RefUpdateReason = "precondition_failed" + RefUpdateReasonConflict RefUpdateReason = "conflict" + RefUpdateReasonNotFound RefUpdateReason = "not_found" + RefUpdateReasonInvalid RefUpdateReason = "invalid" + RefUpdateReasonTimeout RefUpdateReason = "timeout" + RefUpdateReasonUnauthorized RefUpdateReason = "unauthorized" + RefUpdateReasonForbidden RefUpdateReason = "forbidden" + RefUpdateReasonUnavailable RefUpdateReason = "unavailable" + RefUpdateReasonInternal RefUpdateReason = "internal" + RefUpdateReasonFailed RefUpdateReason = "failed" + RefUpdateReasonUnknown RefUpdateReason = "unknown" +) + +// RefUpdateError describes failed ref updates. 
+type RefUpdateError struct { + Message string + Status string + Reason RefUpdateReason + RefUpdate *RefUpdate +} + +func (e *RefUpdateError) Error() string { + return e.Message +} + +func inferRefUpdateReason(status string) RefUpdateReason { + if strings.TrimSpace(status) == "" { + return RefUpdateReasonUnknown + } + + switch strings.ToLower(strings.TrimSpace(status)) { + case "precondition_failed": + return RefUpdateReasonPreconditionFailed + case "conflict": + return RefUpdateReasonConflict + case "not_found": + return RefUpdateReasonNotFound + case "invalid": + return RefUpdateReasonInvalid + case "timeout": + return RefUpdateReasonTimeout + case "unauthorized": + return RefUpdateReasonUnauthorized + case "forbidden": + return RefUpdateReasonForbidden + case "unavailable": + return RefUpdateReasonUnavailable + case "internal": + return RefUpdateReasonInternal + case "failed": + return RefUpdateReasonFailed + case "ok": + return RefUpdateReasonUnknown + default: + return RefUpdateReasonUnknown + } +} + +func newRefUpdateError(message string, status string, refUpdate *RefUpdate) *RefUpdateError { + return &RefUpdateError{ + Message: message, + Status: status, + Reason: inferRefUpdateReason(status), + RefUpdate: refUpdate, + } +} diff --git a/packages/git-storage-sdk-go/fetch.go b/packages/git-storage-sdk-go/fetch.go new file mode 100644 index 000000000..1ec3922f2 --- /dev/null +++ b/packages/git-storage-sdk-go/fetch.go @@ -0,0 +1,129 @@ +package storage + +import ( + "bytes" + "context" + "encoding/json" + "io" + "net/http" + "net/url" + "strings" +) + +type apiFetcher struct { + baseURL string + version int + httpClient *http.Client +} + +func newAPIFetcher(baseURL string, version int, client *http.Client) *apiFetcher { + if client == nil { + client = http.DefaultClient + } + return &apiFetcher{baseURL: strings.TrimRight(baseURL, "/"), version: version, httpClient: client} +} + +func (f *apiFetcher) basePath() string { + return f.baseURL + "/api/v" + 
itoa(f.version) +} + +func (f *apiFetcher) buildURL(path string, params url.Values) string { + if params == nil || len(params) == 0 { + return f.basePath() + "/" + path + } + return f.basePath() + "/" + path + "?" + params.Encode() +} + +type requestOptions struct { + allowedStatus map[int]bool +} + +func (f *apiFetcher) request(ctx context.Context, method string, path string, params url.Values, body interface{}, jwt string, opts *requestOptions) (*http.Response, error) { + if ctx == nil { + ctx = context.Background() + } + + urlStr := f.buildURL(path, params) + var bodyReader io.Reader + if body != nil { + payload, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(payload) + } + + req, err := http.NewRequestWithContext(ctx, method, urlStr, bodyReader) + if err != nil { + return nil, err + } + + req.Header.Set("Authorization", "Bearer "+jwt) + req.Header.Set("Code-Storage-Agent", userAgent()) + if body != nil { + req.Header.Set("Content-Type", "application/json") + } + + resp, err := f.httpClient.Do(req) + if err != nil { + return nil, err + } + + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + if opts != nil && opts.allowedStatus != nil && opts.allowedStatus[resp.StatusCode] { + return resp, nil + } + + defer resp.Body.Close() + bodyBytes, _ := io.ReadAll(resp.Body) + var parsed interface{} + message := "" + contentType := resp.Header.Get("content-type") + if strings.Contains(contentType, "application/json") { + var payload map[string]interface{} + if err := json.Unmarshal(bodyBytes, &payload); err == nil { + parsed = payload + if errVal, ok := payload["error"].(string); ok && strings.TrimSpace(errVal) != "" { + message = strings.TrimSpace(errVal) + } + } + } + if message == "" && len(bodyBytes) > 0 { + message = strings.TrimSpace(string(bodyBytes)) + if message != "" { + parsed = message + } + } + + if message == "" { + message = "Request " + method + " " + urlStr + " failed with status " + 
itoa(resp.StatusCode) + " " + resp.Status + } + + return nil, &APIError{ + Message: message, + Status: resp.StatusCode, + StatusText: resp.Status, + Method: method, + URL: urlStr, + Body: parsed, + } + } + + return resp, nil +} + +func (f *apiFetcher) get(ctx context.Context, path string, params url.Values, jwt string, opts *requestOptions) (*http.Response, error) { + return f.request(ctx, http.MethodGet, path, params, nil, jwt, opts) +} + +func (f *apiFetcher) post(ctx context.Context, path string, params url.Values, body interface{}, jwt string, opts *requestOptions) (*http.Response, error) { + return f.request(ctx, http.MethodPost, path, params, body, jwt, opts) +} + +func (f *apiFetcher) put(ctx context.Context, path string, params url.Values, body interface{}, jwt string, opts *requestOptions) (*http.Response, error) { + return f.request(ctx, http.MethodPut, path, params, body, jwt, opts) +} + +func (f *apiFetcher) delete(ctx context.Context, path string, params url.Values, body interface{}, jwt string, opts *requestOptions) (*http.Response, error) { + return f.request(ctx, http.MethodDelete, path, params, body, jwt, opts) +} diff --git a/packages/git-storage-sdk-go/go.mod b/packages/git-storage-sdk-go/go.mod new file mode 100644 index 000000000..c73c4e0b4 --- /dev/null +++ b/packages/git-storage-sdk-go/go.mod @@ -0,0 +1,8 @@ +module pierre.co/pierre/monorepo/packages/git-storage-sdk-go + +go 1.24.3 + +require ( + github.com/golang-jwt/jwt/v5 v5.3.0 + github.com/google/uuid v1.6.0 +) diff --git a/packages/git-storage-sdk-go/go.sum b/packages/git-storage-sdk-go/go.sum new file mode 100644 index 000000000..63206fc12 --- /dev/null +++ b/packages/git-storage-sdk-go/go.sum @@ -0,0 +1,4 @@ +github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= +github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid 
v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= diff --git a/packages/git-storage-sdk-go/helpers_test.go b/packages/git-storage-sdk-go/helpers_test.go new file mode 100644 index 000000000..637e40bd5 --- /dev/null +++ b/packages/git-storage-sdk-go/helpers_test.go @@ -0,0 +1,54 @@ +package storage + +import ( + "net/url" + "strings" + "testing" + + "github.com/golang-jwt/jwt/v5" +) + +const testKey = "-----BEGIN PRIVATE KEY-----\nMIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgy3DPdzzsP6tOOvmorjbx6L7mpFmKKL2hNWNW3urkN8ehRANCAAQ7/DPhGH3kaWl0YEIO+W9WmhyCclDGyTh6suablSura7ZDG8hpm3oNsq/ykC3Scfsw6ZTuuVuLlXKV/be/Xr0d\n-----END PRIVATE KEY-----\n" + +func parseJWTFromURL(t *testing.T, rawURL string) jwt.MapClaims { + t.Helper() + parsed, err := url.Parse(rawURL) + if err != nil { + t.Fatalf("parse url: %v", err) + } + password, ok := parsed.User.Password() + if !ok || strings.TrimSpace(password) == "" { + t.Fatalf("jwt not found in url") + } + claims := jwt.MapClaims{} + _, err = jwt.ParseWithClaims(password, claims, func(token *jwt.Token) (interface{}, error) { + key, err := parseECPrivateKey([]byte(testKey)) + if err != nil { + return nil, err + } + return &key.PublicKey, nil + }) + if err != nil { + t.Fatalf("parse jwt: %v", err) + } + return claims +} + +func parseJWTFromToken(t *testing.T, token string) jwt.MapClaims { + t.Helper() + if strings.TrimSpace(token) == "" { + t.Fatalf("jwt token is empty") + } + claims := jwt.MapClaims{} + _, err := jwt.ParseWithClaims(token, claims, func(token *jwt.Token) (interface{}, error) { + key, err := parseECPrivateKey([]byte(testKey)) + if err != nil { + return nil, err + } + return &key.PublicKey, nil + }) + if err != nil { + t.Fatalf("parse jwt: %v", err) + } + return claims +} diff --git a/packages/git-storage-sdk-go/moon.yml b/packages/git-storage-sdk-go/moon.yml new file mode 100644 index 000000000..02bfa4cc0 --- /dev/null +++ b/packages/git-storage-sdk-go/moon.yml @@ -0,0 +1,14 @@ +type: library +language: go + 
+project: + name: git-storage-sdk-go + description: Pierre Git Storage SDK for Go + +tasks: + test: + command: go test ./... + inputs: + - "**/*.go" + - go.mod + - go.sum diff --git a/packages/git-storage-sdk-go/repo.go b/packages/git-storage-sdk-go/repo.go new file mode 100644 index 000000000..d7a0ed3ea --- /dev/null +++ b/packages/git-storage-sdk-go/repo.go @@ -0,0 +1,754 @@ +package storage + +import ( + "context" + "errors" + "net/http" + "net/url" + "strings" +) + +var restoreCommitAllowedStatus = map[int]bool{ + 400: true, + 401: true, + 403: true, + 404: true, + 408: true, + 409: true, + 412: true, + 422: true, + 429: true, + 499: true, + 500: true, + 502: true, + 503: true, + 504: true, +} + +var noteWriteAllowedStatus = map[int]bool{ + 400: true, + 401: true, + 403: true, + 404: true, + 408: true, + 409: true, + 412: true, + 422: true, + 429: true, + 499: true, + 500: true, + 502: true, + 503: true, + 504: true, +} + +// RemoteURL returns an authenticated remote URL. +func (r *Repo) RemoteURL(ctx context.Context, options RemoteURLOptions) (string, error) { + jwtToken, err := r.client.generateJWT(r.ID, options) + if err != nil { + return "", err + } + + u := url.URL{ + Scheme: "https", + Host: r.client.options.StorageBaseURL, + Path: "/" + r.ID + ".git", + } + u.User = url.UserPassword("t", jwtToken) + return u.String(), nil +} + +// EphemeralRemoteURL returns the ephemeral remote URL. +func (r *Repo) EphemeralRemoteURL(ctx context.Context, options RemoteURLOptions) (string, error) { + jwtToken, err := r.client.generateJWT(r.ID, options) + if err != nil { + return "", err + } + + u := url.URL{ + Scheme: "https", + Host: r.client.options.StorageBaseURL, + Path: "/" + r.ID + "+ephemeral.git", + } + u.User = url.UserPassword("t", jwtToken) + return u.String(), nil +} + +// FileStream returns the raw response for streaming file contents. 
+func (r *Repo) FileStream(ctx context.Context, options GetFileOptions) (*http.Response, error) { + if strings.TrimSpace(options.Path) == "" { + return nil, errors.New("getFileStream path is required") + } + + ttl := resolveInvocationTTL(options.InvocationOptions, defaultTokenTTL) + jwtToken, err := r.client.generateJWT(r.ID, RemoteURLOptions{Permissions: []Permission{PermissionGitRead}, TTL: ttl}) + if err != nil { + return nil, err + } + + params := url.Values{} + params.Set("path", options.Path) + if options.Ref != "" { + params.Set("ref", options.Ref) + } + if options.Ephemeral != nil { + params.Set("ephemeral", boolToString(*options.Ephemeral)) + } + if options.EphemeralBase != nil { + params.Set("ephemeral_base", boolToString(*options.EphemeralBase)) + } + + resp, err := r.client.api.get(ctx, "repos/file", params, jwtToken, nil) + if err != nil { + return nil, err + } + + return resp, nil +} + +// ListFiles lists file paths. +func (r *Repo) ListFiles(ctx context.Context, options ListFilesOptions) (ListFilesResult, error) { + ttl := resolveInvocationTTL(options.InvocationOptions, defaultTokenTTL) + jwtToken, err := r.client.generateJWT(r.ID, RemoteURLOptions{Permissions: []Permission{PermissionGitRead}, TTL: ttl}) + if err != nil { + return ListFilesResult{}, err + } + + params := url.Values{} + if options.Ref != "" { + params.Set("ref", options.Ref) + } + if options.Ephemeral != nil { + params.Set("ephemeral", boolToString(*options.Ephemeral)) + } + if len(params) == 0 { + params = nil + } + + resp, err := r.client.api.get(ctx, "repos/files", params, jwtToken, nil) + if err != nil { + return ListFilesResult{}, err + } + defer resp.Body.Close() + + var payload listFilesResponse + if err := decodeJSON(resp, &payload); err != nil { + return ListFilesResult{}, err + } + + return ListFilesResult{Paths: payload.Paths, Ref: payload.Ref}, nil +} + +// ListBranches lists branches. 
+func (r *Repo) ListBranches(ctx context.Context, options ListBranchesOptions) (ListBranchesResult, error) { + ttl := resolveInvocationTTL(options.InvocationOptions, defaultTokenTTL) + jwtToken, err := r.client.generateJWT(r.ID, RemoteURLOptions{Permissions: []Permission{PermissionGitRead}, TTL: ttl}) + if err != nil { + return ListBranchesResult{}, err + } + + params := url.Values{} + if options.Cursor != "" { + params.Set("cursor", options.Cursor) + } + if options.Limit > 0 { + params.Set("limit", itoa(options.Limit)) + } + if len(params) == 0 { + params = nil + } + + resp, err := r.client.api.get(ctx, "repos/branches", params, jwtToken, nil) + if err != nil { + return ListBranchesResult{}, err + } + defer resp.Body.Close() + + var payload listBranchesResponse + if err := decodeJSON(resp, &payload); err != nil { + return ListBranchesResult{}, err + } + + result := ListBranchesResult{HasMore: payload.HasMore} + if payload.NextCursor != "" { + result.NextCursor = payload.NextCursor + } + for _, branch := range payload.Branches { + result.Branches = append(result.Branches, BranchInfo{ + Cursor: branch.Cursor, + Name: branch.Name, + HeadSHA: branch.HeadSHA, + CreatedAt: branch.CreatedAt, + }) + } + return result, nil +} + +// ListCommits lists commits. 
+func (r *Repo) ListCommits(ctx context.Context, options ListCommitsOptions) (ListCommitsResult, error) { + ttl := resolveInvocationTTL(options.InvocationOptions, defaultTokenTTL) + jwtToken, err := r.client.generateJWT(r.ID, RemoteURLOptions{Permissions: []Permission{PermissionGitRead}, TTL: ttl}) + if err != nil { + return ListCommitsResult{}, err + } + + params := url.Values{} + if options.Branch != "" { + params.Set("branch", options.Branch) + } + if options.Cursor != "" { + params.Set("cursor", options.Cursor) + } + if options.Limit > 0 { + params.Set("limit", itoa(options.Limit)) + } + if len(params) == 0 { + params = nil + } + + resp, err := r.client.api.get(ctx, "repos/commits", params, jwtToken, nil) + if err != nil { + return ListCommitsResult{}, err + } + defer resp.Body.Close() + + var payload listCommitsResponse + if err := decodeJSON(resp, &payload); err != nil { + return ListCommitsResult{}, err + } + + result := ListCommitsResult{HasMore: payload.HasMore} + if payload.NextCursor != "" { + result.NextCursor = payload.NextCursor + } + for _, commit := range payload.Commits { + result.Commits = append(result.Commits, CommitInfo{ + SHA: commit.SHA, + Message: commit.Message, + AuthorName: commit.AuthorName, + AuthorEmail: commit.AuthorEmail, + CommitterName: commit.CommitterName, + CommitterEmail: commit.CommitterEmail, + Date: parseTime(commit.Date), + RawDate: commit.Date, + }) + } + + return result, nil +} + +// GetNote reads a git note. 
+func (r *Repo) GetNote(ctx context.Context, options GetNoteOptions) (GetNoteResult, error) { + sha := strings.TrimSpace(options.SHA) + if sha == "" { + return GetNoteResult{}, errors.New("getNote sha is required") + } + + ttl := resolveInvocationTTL(options.InvocationOptions, defaultTokenTTL) + jwtToken, err := r.client.generateJWT(r.ID, RemoteURLOptions{Permissions: []Permission{PermissionGitRead}, TTL: ttl}) + if err != nil { + return GetNoteResult{}, err + } + + params := url.Values{} + params.Set("sha", sha) + + resp, err := r.client.api.get(ctx, "repos/notes", params, jwtToken, nil) + if err != nil { + return GetNoteResult{}, err + } + defer resp.Body.Close() + + var payload noteReadResponse + if err := decodeJSON(resp, &payload); err != nil { + return GetNoteResult{}, err + } + + return GetNoteResult{SHA: payload.SHA, Note: payload.Note, RefSHA: payload.RefSHA}, nil +} + +// CreateNote adds a git note. +func (r *Repo) CreateNote(ctx context.Context, options CreateNoteOptions) (NoteWriteResult, error) { + return r.writeNote(ctx, options.InvocationOptions, "add", options.SHA, options.Note, options.ExpectedRefSHA, options.Author) +} + +// AppendNote appends to a git note. +func (r *Repo) AppendNote(ctx context.Context, options AppendNoteOptions) (NoteWriteResult, error) { + return r.writeNote(ctx, options.InvocationOptions, "append", options.SHA, options.Note, options.ExpectedRefSHA, options.Author) +} + +// DeleteNote deletes a git note. 
func (r *Repo) DeleteNote(ctx context.Context, options DeleteNoteOptions) (NoteWriteResult, error) {
	// The target commit SHA is required; it is trimmed before use.
	sha := strings.TrimSpace(options.SHA)
	if sha == "" {
		return NoteWriteResult{}, errors.New("deleteNote sha is required")
	}

	// Note deletion requires a write-scoped token.
	ttl := resolveInvocationTTL(options.InvocationOptions, defaultTokenTTL)
	jwtToken, err := r.client.generateJWT(r.ID, RemoteURLOptions{Permissions: []Permission{PermissionGitWrite}, TTL: ttl})
	if err != nil {
		return NoteWriteResult{}, err
	}

	// Optional fields are only set when provided so they are omitted from
	// the JSON payload otherwise.
	body := &noteWriteRequest{SHA: sha}
	if strings.TrimSpace(options.ExpectedRefSHA) != "" {
		body.ExpectedRefSHA = options.ExpectedRefSHA
	}
	if options.Author != nil {
		// An author, when supplied, must be complete.
		if strings.TrimSpace(options.Author.Name) == "" || strings.TrimSpace(options.Author.Email) == "" {
			return NoteWriteResult{}, errors.New("deleteNote author name and email are required when provided")
		}
		body.Author = &authorInfo{Name: options.Author.Name, Email: options.Author.Email}
	}

	// Ref-update failure statuses (4xx/5xx) are allowed through so the body
	// can be parsed into a structured result below.
	resp, err := r.client.api.delete(ctx, "repos/notes", nil, body, jwtToken, &requestOptions{allowedStatus: noteWriteAllowedStatus})
	if err != nil {
		return NoteWriteResult{}, err
	}
	defer resp.Body.Close()

	result, err := parseNoteWriteResponse(resp, "DELETE")
	if err != nil {
		return NoteWriteResult{}, err
	}
	// Unsuccessful ref updates are surfaced as *RefUpdateError.
	if !result.Result.Success {
		message := result.Result.Message
		if strings.TrimSpace(message) == "" {
			message = "deleteNote failed with status " + result.Result.Status
		}
		return NoteWriteResult{}, newRefUpdateError(
			message,
			result.Result.Status,
			partialRefUpdate(result.TargetRef, result.BaseCommit, result.NewRefSHA),
		)
	}
	return result, nil
}

// writeNote is the shared implementation behind CreateNote ("add") and
// AppendNote ("append"). It validates inputs, posts the note payload, and
// maps unsuccessful ref updates to *RefUpdateError.
func (r *Repo) writeNote(ctx context.Context, invocation InvocationOptions, action string, sha string, note string, expectedRefSHA string, author *NoteAuthor) (NoteWriteResult, error) {
	// Both the commit SHA and the note content are required (post-trim).
	sha = strings.TrimSpace(sha)
	if sha == "" {
		return NoteWriteResult{}, errors.New("note sha is required")
	}

	note = strings.TrimSpace(note)
	if note == "" {
		return NoteWriteResult{}, errors.New("note content is required")
	}

	// Writing notes requires a write-scoped token.
	ttl := resolveInvocationTTL(invocation, defaultTokenTTL)
	jwtToken, err := r.client.generateJWT(r.ID, RemoteURLOptions{Permissions: []Permission{PermissionGitWrite}, TTL: ttl})
	if err != nil {
		return NoteWriteResult{}, err
	}

	body := &noteWriteRequest{
		SHA:    sha,
		Action: action,
		Note:   note,
	}
	if strings.TrimSpace(expectedRefSHA) != "" {
		body.ExpectedRefSHA = expectedRefSHA
	}
	if author != nil {
		// An author, when supplied, must be complete.
		if strings.TrimSpace(author.Name) == "" || strings.TrimSpace(author.Email) == "" {
			return NoteWriteResult{}, errors.New("note author name and email are required when provided")
		}
		body.Author = &authorInfo{Name: author.Name, Email: author.Email}
	}

	// Failure statuses are allowed through so the structured result can be
	// parsed instead of surfacing a generic APIError.
	resp, err := r.client.api.post(ctx, "repos/notes", nil, body, jwtToken, &requestOptions{allowedStatus: noteWriteAllowedStatus})
	if err != nil {
		return NoteWriteResult{}, err
	}
	defer resp.Body.Close()

	result, err := parseNoteWriteResponse(resp, "POST")
	if err != nil {
		return NoteWriteResult{}, err
	}
	if !result.Result.Success {
		// Fall back to an action-specific message when the server gave none.
		message := result.Result.Message
		if strings.TrimSpace(message) == "" {
			if action == "append" {
				message = "appendNote failed with status " + result.Result.Status
			} else {
				message = "createNote failed with status " + result.Result.Status
			}
		}
		return NoteWriteResult{}, newRefUpdateError(
			message,
			result.Result.Status,
			partialRefUpdate(result.TargetRef, result.BaseCommit, result.NewRefSHA),
		)
	}
	return result, nil
}

// GetBranchDiff returns a diff for a branch.
+func (r *Repo) GetBranchDiff(ctx context.Context, options GetBranchDiffOptions) (GetBranchDiffResult, error) { + if strings.TrimSpace(options.Branch) == "" { + return GetBranchDiffResult{}, errors.New("getBranchDiff branch is required") + } + + ttl := resolveInvocationTTL(options.InvocationOptions, defaultTokenTTL) + jwtToken, err := r.client.generateJWT(r.ID, RemoteURLOptions{Permissions: []Permission{PermissionGitRead}, TTL: ttl}) + if err != nil { + return GetBranchDiffResult{}, err + } + + params := url.Values{} + params.Set("branch", options.Branch) + if strings.TrimSpace(options.Base) != "" { + params.Set("base", options.Base) + } + if options.Ephemeral != nil { + params.Set("ephemeral", boolToString(*options.Ephemeral)) + } + if options.EphemeralBase != nil { + params.Set("ephemeral_base", boolToString(*options.EphemeralBase)) + } + for _, path := range options.Paths { + if strings.TrimSpace(path) != "" { + params.Add("path", path) + } + } + + resp, err := r.client.api.get(ctx, "repos/branches/diff", params, jwtToken, nil) + if err != nil { + return GetBranchDiffResult{}, err + } + defer resp.Body.Close() + + var payload branchDiffResponse + if err := decodeJSON(resp, &payload); err != nil { + return GetBranchDiffResult{}, err + } + + return transformBranchDiff(payload), nil +} + +// GetCommitDiff returns a diff for a commit. 
+func (r *Repo) GetCommitDiff(ctx context.Context, options GetCommitDiffOptions) (GetCommitDiffResult, error) { + if strings.TrimSpace(options.SHA) == "" { + return GetCommitDiffResult{}, errors.New("getCommitDiff sha is required") + } + + ttl := resolveInvocationTTL(options.InvocationOptions, defaultTokenTTL) + jwtToken, err := r.client.generateJWT(r.ID, RemoteURLOptions{Permissions: []Permission{PermissionGitRead}, TTL: ttl}) + if err != nil { + return GetCommitDiffResult{}, err + } + + params := url.Values{} + params.Set("sha", options.SHA) + if strings.TrimSpace(options.BaseSHA) != "" { + params.Set("baseSha", options.BaseSHA) + } + for _, path := range options.Paths { + if strings.TrimSpace(path) != "" { + params.Add("path", path) + } + } + + resp, err := r.client.api.get(ctx, "repos/diff", params, jwtToken, nil) + if err != nil { + return GetCommitDiffResult{}, err + } + defer resp.Body.Close() + + var payload commitDiffResponse + if err := decodeJSON(resp, &payload); err != nil { + return GetCommitDiffResult{}, err + } + + return transformCommitDiff(payload), nil +} + +// Grep runs a grep query. 
+func (r *Repo) Grep(ctx context.Context, options GrepOptions) (GrepResult, error) { + pattern := strings.TrimSpace(options.Query.Pattern) + if pattern == "" { + return GrepResult{}, errors.New("grep query.pattern is required") + } + + ttl := resolveInvocationTTL(options.InvocationOptions, defaultTokenTTL) + jwtToken, err := r.client.generateJWT(r.ID, RemoteURLOptions{Permissions: []Permission{PermissionGitRead}, TTL: ttl}) + if err != nil { + return GrepResult{}, err + } + + body := &grepRequest{ + Query: grepQueryPayload{ + Pattern: pattern, + CaseSensitive: options.Query.CaseSensitive, + }, + } + if options.Ref != "" { + body.Rev = options.Ref + } + if len(options.Paths) > 0 { + body.Paths = options.Paths + } + if options.FileFilters != nil { + filters := &grepFileFilterPayload{} + hasFilters := false + if len(options.FileFilters.IncludeGlobs) > 0 { + filters.IncludeGlobs = options.FileFilters.IncludeGlobs + hasFilters = true + } + if len(options.FileFilters.ExcludeGlobs) > 0 { + filters.ExcludeGlobs = options.FileFilters.ExcludeGlobs + hasFilters = true + } + if len(options.FileFilters.ExtensionFilters) > 0 { + filters.ExtensionFilters = options.FileFilters.ExtensionFilters + hasFilters = true + } + if hasFilters { + body.FileFilters = filters + } + } + if options.Context != nil { + ctx := &grepContextPayload{} + hasCtx := false + if options.Context.Before != nil { + ctx.Before = options.Context.Before + hasCtx = true + } + if options.Context.After != nil { + ctx.After = options.Context.After + hasCtx = true + } + if hasCtx { + body.Context = ctx + } + } + if options.Limits != nil { + limits := &grepLimitsPayload{} + hasLimits := false + if options.Limits.MaxLines != nil { + limits.MaxLines = options.Limits.MaxLines + hasLimits = true + } + if options.Limits.MaxMatchesPerFile != nil { + limits.MaxMatchesPerFile = options.Limits.MaxMatchesPerFile + hasLimits = true + } + if hasLimits { + body.Limits = limits + } + } + if options.Pagination != nil { + pagination 
:= &grepPaginationPayload{} + hasPagination := false + if strings.TrimSpace(options.Pagination.Cursor) != "" { + pagination.Cursor = options.Pagination.Cursor + hasPagination = true + } + if options.Pagination.Limit != nil { + pagination.Limit = options.Pagination.Limit + hasPagination = true + } + if hasPagination { + body.Pagination = pagination + } + } + + resp, err := r.client.api.post(ctx, "repos/grep", nil, body, jwtToken, nil) + if err != nil { + return GrepResult{}, err + } + defer resp.Body.Close() + + var payload grepResponse + if err := decodeJSON(resp, &payload); err != nil { + return GrepResult{}, err + } + + result := GrepResult{ + Query: GrepQuery{Pattern: payload.Query.Pattern, CaseSensitive: &payload.Query.CaseSensitive}, + Repo: GrepRepo{Ref: payload.Repo.Ref, Commit: payload.Repo.Commit}, + HasMore: payload.HasMore, + } + if payload.NextCursor != "" { + result.NextCursor = payload.NextCursor + } + for _, match := range payload.Matches { + entry := GrepFileMatch{Path: match.Path} + for _, line := range match.Lines { + entry.Lines = append(entry.Lines, GrepLine{LineNumber: line.LineNumber, Text: line.Text, Type: line.Type}) + } + result.Matches = append(result.Matches, entry) + } + + return result, nil +} + +// PullUpstream triggers a pull-upstream operation. 
func (r *Repo) PullUpstream(ctx context.Context, options PullUpstreamOptions) error {
	// Pulling upstream mutates the repo, so a write-scoped token is minted.
	ttl := resolveInvocationTTL(options.InvocationOptions, defaultTokenTTL)
	jwtToken, err := r.client.generateJWT(r.ID, RemoteURLOptions{Permissions: []Permission{PermissionGitWrite}, TTL: ttl})
	if err != nil {
		return err
	}

	// Ref is optional; omitted from the payload when blank.
	body := &pullUpstreamRequest{}
	if strings.TrimSpace(options.Ref) != "" {
		body.Ref = options.Ref
	}

	resp, err := r.client.api.post(ctx, "repos/pull-upstream", nil, body, jwtToken, nil)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	// The operation is asynchronous: only 202 Accepted counts as success.
	if resp.StatusCode != 202 {
		return errors.New("Pull Upstream failed: " + resp.Status)
	}
	return nil
}

// CreateBranch creates a new branch.
func (r *Repo) CreateBranch(ctx context.Context, options CreateBranchOptions) (CreateBranchResult, error) {
	// Both branch names are required (post-trim).
	baseBranch := strings.TrimSpace(options.BaseBranch)
	targetBranch := strings.TrimSpace(options.TargetBranch)
	if baseBranch == "" {
		return CreateBranchResult{}, errors.New("createBranch baseBranch is required")
	}
	if targetBranch == "" {
		return CreateBranchResult{}, errors.New("createBranch targetBranch is required")
	}

	ttl := resolveInvocationTTL(options.InvocationOptions, defaultTokenTTL)
	jwtToken, err := r.client.generateJWT(r.ID, RemoteURLOptions{Permissions: []Permission{PermissionGitWrite}, TTL: ttl})
	if err != nil {
		return CreateBranchResult{}, err
	}

	body := &createBranchRequest{
		BaseBranch:        baseBranch,
		TargetBranch:      targetBranch,
		BaseIsEphemeral:   options.BaseIsEphemeral,
		TargetIsEphemeral: options.TargetIsEphemeral,
	}

	resp, err := r.client.api.post(ctx, "repos/branches/create", nil, body, jwtToken, nil)
	if err != nil {
		return CreateBranchResult{}, err
	}
	defer resp.Body.Close()

	var payload createBranchResponse
	if err := decodeJSON(resp, &payload); err != nil {
		return CreateBranchResult{}, err
	}

	result := CreateBranchResult{
		Message:           payload.Message,
		TargetBranch:      payload.TargetBranch,
		TargetIsEphemeral: payload.TargetIsEphemeral,
		CommitSHA:         payload.CommitSHA,
	}
	return result, nil
}

// RestoreCommit restores a commit into a branch.
func (r *Repo) RestoreCommit(ctx context.Context, options RestoreCommitOptions) (RestoreCommitResult, error) {
	// The branch is given without the refs/ prefix; reject it explicitly.
	targetBranch := strings.TrimSpace(options.TargetBranch)
	if targetBranch == "" {
		return RestoreCommitResult{}, errors.New("restoreCommit targetBranch is required")
	}
	if strings.HasPrefix(targetBranch, "refs/") {
		return RestoreCommitResult{}, errors.New("restoreCommit targetBranch must not include refs/ prefix")
	}

	targetSHA := strings.TrimSpace(options.TargetCommitSHA)
	if targetSHA == "" {
		return RestoreCommitResult{}, errors.New("restoreCommit targetCommitSha is required")
	}

	// Unlike notes, the author is mandatory here: a new commit is written.
	if strings.TrimSpace(options.Author.Name) == "" || strings.TrimSpace(options.Author.Email) == "" {
		return RestoreCommitResult{}, errors.New("restoreCommit author name and email are required")
	}

	// NOTE(review): this uses resolveCommitTTL, not resolveInvocationTTL as
	// the other methods do — presumably commit operations get a longer
	// default; confirm against the helper.
	ttl := resolveCommitTTL(options.InvocationOptions, defaultTokenTTL)
	jwtToken, err := r.client.generateJWT(r.ID, RemoteURLOptions{Permissions: []Permission{PermissionGitWrite}, TTL: ttl})
	if err != nil {
		return RestoreCommitResult{}, err
	}

	metadata := &restoreCommitMetadata{
		TargetBranch:    targetBranch,
		TargetCommitSHA: targetSHA,
		Author: authorInfo{
			Name:  strings.TrimSpace(options.Author.Name),
			Email: strings.TrimSpace(options.Author.Email),
		},
	}

	// Optional fields are only set when provided.
	if strings.TrimSpace(options.CommitMessage) != "" {
		metadata.CommitMessage = options.CommitMessage
	}
	if strings.TrimSpace(options.ExpectedHeadSHA) != "" {
		metadata.ExpectedHeadSHA = options.ExpectedHeadSHA
	}
	if options.Committer != nil {
		// A committer, when supplied, must be complete.
		if strings.TrimSpace(options.Committer.Name) == "" || strings.TrimSpace(options.Committer.Email) == "" {
			return RestoreCommitResult{}, errors.New("restoreCommit committer name and email are required when provided")
		}
		metadata.Committer = &authorInfo{
			Name:  strings.TrimSpace(options.Committer.Name),
			Email: strings.TrimSpace(options.Committer.Email),
		}
	}

	// Failure statuses are allowed through so the body can be parsed into
	// a structured ack/failure payload below.
	resp, err := r.client.api.post(ctx, "repos/restore-commit", nil, &metadataEnvelope{Metadata: metadata}, jwtToken, &requestOptions{allowedStatus: restoreCommitAllowedStatus})
	if err != nil {
		return RestoreCommitResult{}, err
	}
	defer resp.Body.Close()

	payloadBytes, err := readAll(resp)
	if err != nil {
		return RestoreCommitResult{}, err
	}

	// An ack means success; anything else is mapped to a RefUpdateError,
	// falling back to HTTP-derived status/message when the body gave none.
	ack, failure := parseRestoreCommitPayload(payloadBytes)
	if ack != nil {
		return buildRestoreCommitResult(*ack)
	}

	status := ""
	message := ""
	var refUpdate *RefUpdate
	if failure != nil {
		status = failure.Status
		message = failure.Message
		refUpdate = failure.RefUpdate
	}
	if status == "" {
		status = httpStatusToRestoreStatus(resp.StatusCode)
	}
	if message == "" {
		message = "Restore commit failed with HTTP " + itoa(resp.StatusCode)
	}

	return RestoreCommitResult{}, newRefUpdateError(message, status, refUpdate)
}

// CreateCommit starts a commit builder.
func (r *Repo) CreateCommit(options CommitOptions) (*CommitBuilder, error) {
	// The builder is validated/normalized up front so errors surface before
	// any network work happens.
	builder := &CommitBuilder{options: options, client: r.client, repoID: r.ID}
	if err := builder.normalize(); err != nil {
		return nil, err
	}
	return builder, nil
}

// CreateCommitFromDiff applies a pre-generated diff.
+func (r *Repo) CreateCommitFromDiff(ctx context.Context, options CommitFromDiffOptions) (CommitResult, error) { + exec := diffCommitExecutor{options: options, client: r.client} + return exec.send(ctx, r.ID) +} diff --git a/packages/git-storage-sdk-go/repo_test.go b/packages/git-storage-sdk-go/repo_test.go new file mode 100644 index 000000000..fdb54da19 --- /dev/null +++ b/packages/git-storage-sdk-go/repo_test.go @@ -0,0 +1,224 @@ +package storage + +import ( + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "strings" + "testing" + "time" +) + +func TestRemoteURLJWT(t *testing.T) { + client, err := NewClient(Options{Name: "acme", Key: testKey, StorageBaseURL: "acme.code.storage"}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo-1", DefaultBranch: "main", client: client} + + remote, err := repo.RemoteURL(nil, RemoteURLOptions{}) + if err != nil { + t.Fatalf("remote url error: %v", err) + } + if !strings.Contains(remote, "repo-1.git") { + t.Fatalf("expected repo in url: %s", remote) + } + claims := parseJWTFromURL(t, remote) + if claims["repo"] != "repo-1" { + t.Fatalf("expected repo claim") + } +} + +func TestEphemeralRemoteURL(t *testing.T) { + client, err := NewClient(Options{Name: "acme", Key: testKey, StorageBaseURL: "acme.code.storage"}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo-1", DefaultBranch: "main", client: client} + + remote, err := repo.EphemeralRemoteURL(nil, RemoteURLOptions{}) + if err != nil { + t.Fatalf("remote url error: %v", err) + } + if !strings.Contains(remote, "repo-1+ephemeral.git") { + t.Fatalf("expected ephemeral url: %s", remote) + } +} + +func TestListFilesEphemeral(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + q := r.URL.Query() + if q.Get("ref") != "feature/demo" || q.Get("ephemeral") != "true" { + t.Fatalf("unexpected query: %s", r.URL.RawQuery) + } + 
+		w.Header().Set("Content-Type", "application/json")
+		_, _ = w.Write([]byte(`{"paths":["docs/readme.md"],"ref":"refs/namespaces/ephemeral/refs/heads/feature/demo"}`))
+	}))
+	defer server.Close()
+
+	client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL})
+	if err != nil {
+		t.Fatalf("client error: %v", err)
+	}
+	repo := &Repo{ID: "repo", DefaultBranch: "main", client: client}
+
+	flag := true
+	result, err := repo.ListFiles(nil, ListFilesOptions{Ref: "feature/demo", Ephemeral: &flag})
+	if err != nil {
+		t.Fatalf("list files error: %v", err)
+	}
+	if result.Ref == "" || len(result.Paths) != 1 {
+		t.Fatalf("unexpected result")
+	}
+}
+
+// TestGrepRequestBody verifies that Grep sends the requested ref as the
+// "rev" field in the JSON request body and decodes a well-formed response.
+func TestGrepRequestBody(t *testing.T) {
+	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		var body map[string]interface{}
+		_ = json.NewDecoder(r.Body).Decode(&body)
+		if body["rev"] != "main" {
+			t.Fatalf("expected rev main")
+		}
+		w.Header().Set("Content-Type", "application/json")
+		_, _ = w.Write([]byte(`{"query":{"pattern":"SEARCH","case_sensitive":false},"repo":{"ref":"main","commit":"deadbeef"},"matches":[],"has_more":false}`))
+	}))
+	defer server.Close()
+
+	client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL})
+	if err != nil {
+		t.Fatalf("client error: %v", err)
+	}
+	repo := &Repo{ID: "repo", DefaultBranch: "main", client: client}
+
+	_, err = repo.Grep(nil, GrepOptions{
+		Ref:   "main",
+		Paths: []string{"src/"},
+		Query: GrepQuery{Pattern: "SEARCH", CaseSensitive: boolPtr(false)},
+	})
+	if err != nil {
+		t.Fatalf("grep error: %v", err)
+	}
+}
+
+// TestCreateBranchTTL verifies that the TTL passed via InvocationOptions is
+// reflected in the bearer JWT: the server inspects the token's exp-iat delta.
+func TestCreateBranchTTL(t *testing.T) {
+	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		token := strings.TrimPrefix(r.Header.Get("Authorization"), "Bearer ")
+		claims := parseJWTFromToken(t, token)
+		exp := int64(claims["exp"].(float64))
+		iat := int64(claims["iat"].(float64))
+		if exp-iat != 600 {
+			t.Fatalf("expected ttl 600, got %d", exp-iat)
+		}
+		w.Header().Set("Content-Type", "application/json")
+		_, _ = w.Write([]byte(`{"message":"branch created","target_branch":"feature/demo","target_is_ephemeral":false}`))
+	}))
+	defer server.Close()
+
+	client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL})
+	if err != nil {
+		t.Fatalf("client error: %v", err)
+	}
+	repo := &Repo{ID: "repo", DefaultBranch: "main", client: client}
+
+	_, err = repo.CreateBranch(nil, CreateBranchOptions{BaseBranch: "main", TargetBranch: "feature/demo", InvocationOptions: InvocationOptions{TTL: 600 * time.Second}})
+	if err != nil {
+		t.Fatalf("create branch error: %v", err)
+	}
+}
+
+// TestRestoreCommitConflict verifies that a 409 response carrying a failed
+// restore result surfaces as an error whose message includes the server's
+// "message" field ("branch moved").
+func TestRestoreCommitConflict(t *testing.T) {
+	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		w.WriteHeader(http.StatusConflict)
+		payload := map[string]interface{}{
+			"commit": map[string]interface{}{
+				"commit_sha":    "cafefeed",
+				"tree_sha":      "feedface",
+				"target_branch": "main",
+				"pack_bytes":    0,
+			},
+			"result": map[string]interface{}{
+				"branch":  "main",
+				"old_sha": "old",
+				"new_sha": "new",
+				"success": false,
+				"status":  "precondition_failed",
+				"message": "branch moved",
+			},
+		}
+		_ = json.NewEncoder(w).Encode(payload)
+	}))
+	defer server.Close()
+
+	client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL})
+	if err != nil {
+		t.Fatalf("client error: %v", err)
+	}
+	repo := &Repo{ID: "repo", DefaultBranch: "main", client: client}
+
+	_, err = repo.RestoreCommit(nil, RestoreCommitOptions{
+		TargetBranch:    "main",
+		TargetCommitSHA: "abc",
+		Author:          CommitSignature{Name: "Author", Email: "author@example.com"},
+	})
+	if err == nil {
+		t.Fatalf("expected error")
+	}
+	if !strings.Contains(err.Error(), "branch moved") {
+		t.Fatalf("unexpected error: %v", err)
+	}
+}
+
+// TestNoteWritePayload captures the raw request body of CreateNote and
+// verifies it carries the "add" action.
+func TestNoteWritePayload(t *testing.T) {
+	var captured []byte
+	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		captured, _ = io.ReadAll(r.Body)
+		w.Header().Set("Content-Type", "application/json")
+		_, _ = w.Write([]byte(`{"sha":"abc","target_ref":"refs/notes/commits","new_ref_sha":"def","result":{"success":true,"status":"ok"}}`))
+	}))
+	defer server.Close()
+
+	client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL})
+	if err != nil {
+		t.Fatalf("client error: %v", err)
+	}
+	repo := &Repo{ID: "repo", DefaultBranch: "main", client: client}
+
+	_, err = repo.CreateNote(nil, CreateNoteOptions{SHA: "abc", Note: "note"})
+	if err != nil {
+		t.Fatalf("create note error: %v", err)
+	}
+
+	var payload map[string]interface{}
+	_ = json.Unmarshal(captured, &payload)
+	if payload["action"] != "add" {
+		t.Fatalf("expected add action")
+	}
+}
+
+// TestCommitDiffQuery verifies that GetCommitDiff encodes the commit and base
+// SHAs as the "sha" and "baseSha" URL query parameters.
+func TestCommitDiffQuery(t *testing.T) {
+	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		q := r.URL.Query()
+		if q.Get("sha") != "abc" || q.Get("baseSha") != "base" {
+			t.Fatalf("unexpected query: %s", r.URL.RawQuery)
+		}
+		w.Header().Set("Content-Type", "application/json")
+		_, _ = w.Write([]byte(`{"sha":"abc","stats":{"files":1,"additions":1,"deletions":0,"changes":1},"files":[],"filtered_files":[]}`))
+	}))
+	defer server.Close()
+
+	client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL})
+	if err != nil {
+		t.Fatalf("client error: %v", err)
+	}
+	repo := &Repo{ID: "repo", DefaultBranch: "main", client: client}
+
+	_, err = repo.GetCommitDiff(nil, GetCommitDiffOptions{SHA: "abc", BaseSHA: "base"})
+	if err != nil {
+		t.Fatalf("commit diff error: %v", err)
+	}
+}
+
+// boolPtr returns a pointer to the given bool, for optional request fields.
+func boolPtr(value bool) *bool {
+	return &value
+}
diff --git a/packages/git-storage-sdk-go/requests.go b/packages/git-storage-sdk-go/requests.go
new file mode 100644
index 000000000..a642a20be
--- /dev/null
+++ b/packages/git-storage-sdk-go/requests.go
@@ -0,0 +1,141 @@
+package storage
+
+// createRepoRequest is the JSON body for CreateRepo.
+type createRepoRequest struct {
+	BaseRepo      *baseRepoPayload `json:"base_repo,omitempty"`
+	DefaultBranch string           `json:"default_branch,omitempty"`
+}
+
+// baseRepoPayload describes the upstream repository to base a new repo on
+// (external provider or fork source).
+type baseRepoPayload struct {
+	Provider      string       `json:"provider"`
+	Owner         string       `json:"owner"`
+	Name          string       `json:"name"`
+	Operation     string       `json:"operation,omitempty"`
+	Auth          *authPayload `json:"auth,omitempty"`
+	Ref           string       `json:"ref,omitempty"`
+	SHA           string       `json:"sha,omitempty"`
+	DefaultBranch string       `json:"default_branch,omitempty"`
+}
+
+// authPayload carries the provider access token for base-repo operations.
+type authPayload struct {
+	Token string `json:"token"`
+}
+
+// noteWriteRequest is the JSON body for note write operations.
+type noteWriteRequest struct {
+	SHA            string      `json:"sha"`
+	Action         string      `json:"action,omitempty"`
+	Note           string      `json:"note,omitempty"`
+	ExpectedRefSHA string      `json:"expected_ref_sha,omitempty"`
+	Author         *authorInfo `json:"author,omitempty"`
+}
+
+// authorInfo identifies a commit/note author or committer on the wire.
+type authorInfo struct {
+	Name  string `json:"name"`
+	Email string `json:"email"`
+}
+
+// grepRequest is the JSON body for Grep.
+type grepRequest struct {
+	Query       grepQueryPayload       `json:"query"`
+	Rev         string                 `json:"rev,omitempty"`
+	Paths       []string               `json:"paths,omitempty"`
+	FileFilters *grepFileFilterPayload `json:"file_filters,omitempty"`
+	Context     *grepContextPayload    `json:"context,omitempty"`
+	Limits      *grepLimitsPayload     `json:"limits,omitempty"`
+	Pagination  *grepPaginationPayload `json:"pagination,omitempty"`
+}
+
+// grepQueryPayload is the pattern portion of a grep request.
+type grepQueryPayload struct {
+	Pattern       string `json:"pattern"`
+	CaseSensitive *bool  `json:"case_sensitive,omitempty"`
+}
+
+// grepFileFilterPayload restricts grep to matching file paths/extensions.
+type grepFileFilterPayload struct {
+	IncludeGlobs     []string `json:"include_globs,omitempty"`
+	ExcludeGlobs     []string `json:"exclude_globs,omitempty"`
+	ExtensionFilters []string `json:"extension_filters,omitempty"`
+}
+
+// grepContextPayload configures context lines around each match.
+type grepContextPayload struct {
+	Before *int `json:"before,omitempty"`
+	After  *int `json:"after,omitempty"`
+}
+
+// grepLimitsPayload bounds the amount of grep output.
+type grepLimitsPayload struct {
+	MaxLines          *int `json:"max_lines,omitempty"`
+	MaxMatchesPerFile *int `json:"max_matches_per_file,omitempty"`
+}
+
+// grepPaginationPayload carries cursor-based pagination for grep.
+type grepPaginationPayload struct {
+	Cursor string `json:"cursor,omitempty"`
+	Limit  *int   `json:"limit,omitempty"`
+}
+
+// pullUpstreamRequest is the JSON body for PullUpstream.
+type pullUpstreamRequest struct {
+	Ref string `json:"ref,omitempty"`
+}
+
+// createBranchRequest is the JSON body for CreateBranch.
+type createBranchRequest struct {
+	BaseBranch        string `json:"base_branch"`
+	TargetBranch      string `json:"target_branch"`
+	BaseIsEphemeral   bool   `json:"base_is_ephemeral,omitempty"`
+	TargetIsEphemeral bool   `json:"target_is_ephemeral,omitempty"`
+}
+
+// commitMetadataPayload is the JSON body for commit metadata.
+type commitMetadataPayload struct {
+	TargetBranch    string             `json:"target_branch"`
+	CommitMessage   string             `json:"commit_message"`
+	Author          authorInfo         `json:"author"`
+	Committer       *authorInfo        `json:"committer,omitempty"`
+	ExpectedHeadSHA string             `json:"expected_head_sha,omitempty"`
+	BaseBranch      string             `json:"base_branch,omitempty"`
+	Ephemeral       bool               `json:"ephemeral,omitempty"`
+	EphemeralBase   bool               `json:"ephemeral_base,omitempty"`
+	Files           []fileEntryPayload `json:"files,omitempty"`
+}
+
+// fileEntryPayload describes one file operation within a commit; ContentID
+// links the entry to a streamed blob chunk.
+type fileEntryPayload struct {
+	Path      string `json:"path"`
+	ContentID string `json:"content_id"`
+	Operation string `json:"operation"`
+	Mode      string `json:"mode,omitempty"`
+}
+
+// metadataEnvelope wraps commit metadata for ndjson streaming.
+type metadataEnvelope struct {
+	Metadata interface{} `json:"metadata"`
+}
+
+// restoreCommitMetadata is the JSON body for RestoreCommit.
+type restoreCommitMetadata struct {
+	TargetBranch    string      `json:"target_branch"`
+	TargetCommitSHA string      `json:"target_commit_sha"`
+	CommitMessage   string      `json:"commit_message,omitempty"`
+	ExpectedHeadSHA string      `json:"expected_head_sha,omitempty"`
+	Author          authorInfo  `json:"author"`
+	Committer       *authorInfo `json:"committer,omitempty"`
+}
+
+// blobChunkEnvelope wraps a blob chunk for ndjson streaming.
+type blobChunkEnvelope struct {
+	BlobChunk blobChunkPayload `json:"blob_chunk"`
+}
+
+// blobChunkPayload carries one base64-ish data segment of a blob; EOF marks
+// the final chunk for a ContentID. (Encoding of Data is set by the sender —
+// NOTE(review): confirm against commit_pack.go.)
+type blobChunkPayload struct {
+	ContentID string `json:"content_id"`
+	Data      string `json:"data"`
+	EOF       bool   `json:"eof"`
+}
+
+// diffChunkEnvelope wraps a diff chunk for ndjson streaming.
+type diffChunkEnvelope struct { + DiffChunk diffChunkPayload `json:"diff_chunk"` +} + +type diffChunkPayload struct { + Data string `json:"data"` + EOF bool `json:"eof"` +} diff --git a/packages/git-storage-sdk-go/responses.go b/packages/git-storage-sdk-go/responses.go new file mode 100644 index 000000000..dea50fa7f --- /dev/null +++ b/packages/git-storage-sdk-go/responses.go @@ -0,0 +1,180 @@ +package storage + +type listFilesResponse struct { + Paths []string `json:"paths"` + Ref string `json:"ref"` +} + +type listBranchesResponse struct { + Branches []branchInfoRaw `json:"branches"` + NextCursor string `json:"next_cursor"` + HasMore bool `json:"has_more"` +} + +type branchInfoRaw struct { + Cursor string `json:"cursor"` + Name string `json:"name"` + HeadSHA string `json:"head_sha"` + CreatedAt string `json:"created_at"` +} + +type listCommitsResponse struct { + Commits []commitInfoRaw `json:"commits"` + NextCursor string `json:"next_cursor"` + HasMore bool `json:"has_more"` +} + +type commitInfoRaw struct { + SHA string `json:"sha"` + Message string `json:"message"` + AuthorName string `json:"author_name"` + AuthorEmail string `json:"author_email"` + CommitterName string `json:"committer_name"` + CommitterEmail string `json:"committer_email"` + Date string `json:"date"` +} + +type listReposResponse struct { + Repos []repoInfoRaw `json:"repos"` + NextCursor string `json:"next_cursor"` + HasMore bool `json:"has_more"` +} + +type repoInfoRaw struct { + RepoID string `json:"repo_id"` + URL string `json:"url"` + DefaultBranch string `json:"default_branch"` + CreatedAt string `json:"created_at"` + BaseRepo *repoBaseInfo `json:"base_repo"` +} + +type repoBaseInfo struct { + Provider string `json:"provider"` + Owner string `json:"owner"` + Name string `json:"name"` +} + +type noteReadResponse struct { + SHA string `json:"sha"` + Note string `json:"note"` + RefSHA string `json:"ref_sha"` +} + +type noteWriteResponse struct { + SHA string `json:"sha"` + TargetRef string 
`json:"target_ref"` + BaseCommit string `json:"base_commit"` + NewRefSHA string `json:"new_ref_sha"` + Result noteResult `json:"result"` +} + +type noteResult struct { + Success bool `json:"success"` + Status string `json:"status"` + Message string `json:"message"` +} + +type diffStatsRaw struct { + Files int `json:"files"` + Additions int `json:"additions"` + Deletions int `json:"deletions"` + Changes int `json:"changes"` +} + +type fileDiffRaw struct { + Path string `json:"path"` + State string `json:"state"` + OldPath string `json:"old_path"` + Raw string `json:"raw"` + Bytes int `json:"bytes"` + IsEOF bool `json:"is_eof"` +} + +type filteredFileRaw struct { + Path string `json:"path"` + State string `json:"state"` + OldPath string `json:"old_path"` + Bytes int `json:"bytes"` + IsEOF bool `json:"is_eof"` +} + +type branchDiffResponse struct { + Branch string `json:"branch"` + Base string `json:"base"` + Stats diffStatsRaw `json:"stats"` + Files []fileDiffRaw `json:"files"` + FilteredFiles []filteredFileRaw `json:"filtered_files"` +} + +type commitDiffResponse struct { + SHA string `json:"sha"` + Stats diffStatsRaw `json:"stats"` + Files []fileDiffRaw `json:"files"` + FilteredFiles []filteredFileRaw `json:"filtered_files"` +} + +type createBranchResponse struct { + Message string `json:"message"` + TargetBranch string `json:"target_branch"` + TargetIsEphemeral bool `json:"target_is_ephemeral"` + CommitSHA string `json:"commit_sha"` +} + +type grepResponse struct { + Query struct { + Pattern string `json:"pattern"` + CaseSensitive bool `json:"case_sensitive"` + } `json:"query"` + Repo struct { + Ref string `json:"ref"` + Commit string `json:"commit"` + } `json:"repo"` + Matches []grepFileMatchRaw `json:"matches"` + NextCursor string `json:"next_cursor"` + HasMore bool `json:"has_more"` +} + +type grepFileMatchRaw struct { + Path string `json:"path"` + Lines []grepLineRaw `json:"lines"` +} + +type grepLineRaw struct { + LineNumber int `json:"line_number"` + Text 
string `json:"text"` + Type string `json:"type"` +} + +type restoreCommitAck struct { + Commit struct { + CommitSHA string `json:"commit_sha"` + TreeSHA string `json:"tree_sha"` + TargetBranch string `json:"target_branch"` + PackBytes int `json:"pack_bytes"` + } `json:"commit"` + Result struct { + Branch string `json:"branch"` + OldSHA string `json:"old_sha"` + NewSHA string `json:"new_sha"` + Success bool `json:"success"` + Status string `json:"status"` + Message string `json:"message"` + } `json:"result"` +} + +type restoreCommitResponse struct { + Commit *struct { + CommitSHA string `json:"commit_sha"` + TreeSHA string `json:"tree_sha"` + TargetBranch string `json:"target_branch"` + PackBytes int `json:"pack_bytes"` + } `json:"commit"` + Result struct { + Branch string `json:"branch"` + OldSHA string `json:"old_sha"` + NewSHA string `json:"new_sha"` + Success *bool `json:"success"` + Status string `json:"status"` + Message string `json:"message"` + } `json:"result"` +} diff --git a/packages/git-storage-sdk-go/types.go b/packages/git-storage-sdk-go/types.go new file mode 100644 index 000000000..1feb6e58b --- /dev/null +++ b/packages/git-storage-sdk-go/types.go @@ -0,0 +1,607 @@ +package storage + +import ( + "crypto/ecdsa" + "net/http" + "time" +) + +const DefaultAPIVersion = 1 + +// Permission defines JWT scopes supported by the API. +type Permission string + +const ( + PermissionGitRead Permission = "git:read" + PermissionGitWrite Permission = "git:write" + PermissionRepoWrite Permission = "repo:write" + PermissionOrgRead Permission = "org:read" +) + +// Options configure the Git storage client. +type Options struct { + Name string + Key string + APIBaseURL string + StorageBaseURL string + APIVersion int + DefaultTTL time.Duration + HTTPClient *http.Client +} + +// RemoteURLOptions configure token generation for remote URLs. +type RemoteURLOptions struct { + Permissions []Permission + TTL time.Duration +} + +// InvocationOptions holds common request options. 
+type InvocationOptions struct { + TTL time.Duration +} + +// FindOneOptions identifies a repository by ID. +type FindOneOptions struct { + ID string +} + +// SupportedRepoProvider lists base repo providers. +type SupportedRepoProvider string + +const ( + RepoProviderGitHub SupportedRepoProvider = "github" +) + +// BaseRepo is a base repository definition for create repo. +type BaseRepo interface { + isBaseRepo() +} + +// GitHubBaseRepo references a GitHub repository. +type GitHubBaseRepo struct { + Provider SupportedRepoProvider + Owner string + Name string + DefaultBranch string +} + +func (GitHubBaseRepo) isBaseRepo() {} + +// ForkBaseRepo references an existing Pierre repository to fork. +type ForkBaseRepo struct { + ID string + Ref string + SHA string +} + +func (ForkBaseRepo) isBaseRepo() {} + +// RepoBaseInfo describes a base repo on list results. +type RepoBaseInfo struct { + Provider string + Owner string + Name string +} + +// RepoInfo describes a repo in list results. +type RepoInfo struct { + RepoID string + URL string + DefaultBranch string + CreatedAt string + BaseRepo *RepoBaseInfo +} + +// ListReposOptions controls list repos. +type ListReposOptions struct { + InvocationOptions + Cursor string + Limit int +} + +// ListReposResult returns paginated repos. +type ListReposResult struct { + Repos []RepoInfo + NextCursor string + HasMore bool +} + +// CreateRepoOptions controls repo creation. +type CreateRepoOptions struct { + InvocationOptions + ID string + BaseRepo BaseRepo + DefaultBranch string +} + +// DeleteRepoOptions controls repo deletion. +type DeleteRepoOptions struct { + InvocationOptions + ID string +} + +// DeleteRepoResult describes deletion result. +type DeleteRepoResult struct { + RepoID string + Message string +} + +// GetFileOptions configures file download. +type GetFileOptions struct { + InvocationOptions + Path string + Ref string + Ephemeral *bool + EphemeralBase *bool +} + +// PullUpstreamOptions configures pull-upstream. 
+type PullUpstreamOptions struct { + InvocationOptions + Ref string +} + +// ListFilesOptions configures list files. +type ListFilesOptions struct { + InvocationOptions + Ref string + Ephemeral *bool +} + +// ListFilesResult describes file list. +type ListFilesResult struct { + Paths []string + Ref string +} + +// ListBranchesOptions configures list branches. +type ListBranchesOptions struct { + InvocationOptions + Cursor string + Limit int +} + +// BranchInfo describes a branch. +type BranchInfo struct { + Cursor string + Name string + HeadSHA string + CreatedAt string +} + +// ListBranchesResult describes branches list. +type ListBranchesResult struct { + Branches []BranchInfo + NextCursor string + HasMore bool +} + +// CreateBranchOptions configures branch creation. +type CreateBranchOptions struct { + InvocationOptions + BaseBranch string + TargetBranch string + BaseIsEphemeral bool + TargetIsEphemeral bool +} + +// CreateBranchResult describes branch creation result. +type CreateBranchResult struct { + Message string + TargetBranch string + TargetIsEphemeral bool + CommitSHA string +} + +// ListCommitsOptions configures list commits. +type ListCommitsOptions struct { + InvocationOptions + Branch string + Cursor string + Limit int +} + +// CommitInfo describes a commit entry. +type CommitInfo struct { + SHA string + Message string + AuthorName string + AuthorEmail string + CommitterName string + CommitterEmail string + Date time.Time + RawDate string +} + +// ListCommitsResult describes commits list. +type ListCommitsResult struct { + Commits []CommitInfo + NextCursor string + HasMore bool +} + +// NoteAuthor identifies note author. +type NoteAuthor struct { + Name string + Email string +} + +// GetNoteOptions configures get note. +type GetNoteOptions struct { + InvocationOptions + SHA string +} + +// GetNoteResult describes note read. +type GetNoteResult struct { + SHA string + Note string + RefSHA string +} + +// CreateNoteOptions configures note creation. 
+type CreateNoteOptions struct { + InvocationOptions + SHA string + Note string + ExpectedRefSHA string + Author *NoteAuthor +} + +// AppendNoteOptions configures note append. +type AppendNoteOptions struct { + InvocationOptions + SHA string + Note string + ExpectedRefSHA string + Author *NoteAuthor +} + +// DeleteNoteOptions configures note delete. +type DeleteNoteOptions struct { + InvocationOptions + SHA string + ExpectedRefSHA string + Author *NoteAuthor +} + +// NoteWriteResult describes note write response. +type NoteWriteResult struct { + SHA string + TargetRef string + BaseCommit string + NewRefSHA string + Result NoteResult +} + +// NoteResult describes note write status. +type NoteResult struct { + Success bool + Status string + Message string +} + +// DiffFileState normalizes diff status. +type DiffFileState string + +const ( + DiffStateAdded DiffFileState = "added" + DiffStateModified DiffFileState = "modified" + DiffStateDeleted DiffFileState = "deleted" + DiffStateRenamed DiffFileState = "renamed" + DiffStateCopied DiffFileState = "copied" + DiffStateTypeChanged DiffFileState = "type_changed" + DiffStateUnmerged DiffFileState = "unmerged" + DiffStateUnknown DiffFileState = "unknown" +) + +// DiffStats describes diff stats. +type DiffStats struct { + Files int + Additions int + Deletions int + Changes int +} + +// FileDiff describes a diffed file. +type FileDiff struct { + Path string + State DiffFileState + RawState string + OldPath string + Raw string + Bytes int + IsEOF bool +} + +// FilteredFile describes a filtered diff file. +type FilteredFile struct { + Path string + State DiffFileState + RawState string + OldPath string + Bytes int + IsEOF bool +} + +// GetBranchDiffOptions configures branch diff. +type GetBranchDiffOptions struct { + InvocationOptions + Branch string + Base string + Ephemeral *bool + EphemeralBase *bool + Paths []string +} + +// GetBranchDiffResult describes branch diff. 
+type GetBranchDiffResult struct { + Branch string + Base string + Stats DiffStats + Files []FileDiff + FilteredFiles []FilteredFile +} + +// GetCommitDiffOptions configures commit diff. +type GetCommitDiffOptions struct { + InvocationOptions + SHA string + BaseSHA string + Paths []string +} + +// GetCommitDiffResult describes commit diff. +type GetCommitDiffResult struct { + SHA string + Stats DiffStats + Files []FileDiff + FilteredFiles []FilteredFile +} + +// GrepOptions configures grep. +type GrepOptions struct { + InvocationOptions + Ref string + Query GrepQuery + Paths []string + FileFilters *GrepFileFilters + Context *GrepContext + Limits *GrepLimits + Pagination *GrepPagination +} + +// GrepQuery describes grep query. +type GrepQuery struct { + Pattern string + CaseSensitive *bool +} + +// GrepFileFilters describes file filters for grep. +type GrepFileFilters struct { + IncludeGlobs []string + ExcludeGlobs []string + ExtensionFilters []string +} + +// GrepContext configures context lines. +type GrepContext struct { + Before *int + After *int +} + +// GrepLimits configures grep limits. +type GrepLimits struct { + MaxLines *int + MaxMatchesPerFile *int +} + +// GrepPagination configures grep pagination. +type GrepPagination struct { + Cursor string + Limit *int +} + +// GrepLine describes a grep line match. +type GrepLine struct { + LineNumber int + Text string + Type string +} + +// GrepFileMatch describes matches in a file. +type GrepFileMatch struct { + Path string + Lines []GrepLine +} + +// GrepResult describes grep results. +type GrepResult struct { + Query GrepQuery + Repo GrepRepo + Matches []GrepFileMatch + NextCursor string + HasMore bool +} + +// GrepRepo describes grep repo info. +type GrepRepo struct { + Ref string + Commit string +} + +// CommitSignature identifies an author/committer. +type CommitSignature struct { + Name string + Email string +} + +// GitFileMode describes git file mode. 
+type GitFileMode string + +const ( + GitFileModeRegular GitFileMode = "100644" + GitFileModeExecutable GitFileMode = "100755" + GitFileModeSymlink GitFileMode = "120000" + GitFileModeSubmodule GitFileMode = "160000" +) + +// CommitFileOptions configures file operations. +type CommitFileOptions struct { + Mode GitFileMode +} + +// CommitTextFileOptions configures text files. +type CommitTextFileOptions struct { + CommitFileOptions + Encoding string +} + +// CommitResult describes commit results. +type CommitResult struct { + CommitSHA string + TreeSHA string + TargetBranch string + PackBytes int + BlobCount int + RefUpdate RefUpdate +} + +// RefUpdate describes ref update details. +type RefUpdate struct { + Branch string + OldSHA string + NewSHA string +} + +// CommitBuilder queues commit operations. +type CommitBuilder struct { + options CommitOptions + ops []commitOperation + client *Client + repoID string + sent bool +} + +// CommitOptions configures commit operations. +type CommitOptions struct { + InvocationOptions + TargetBranch string + TargetRef string + CommitMessage string + ExpectedHeadSHA string + BaseBranch string + Ephemeral bool + EphemeralBase bool + Author CommitSignature + Committer *CommitSignature +} + +// CommitFromDiffOptions configures diff commit. +type CommitFromDiffOptions struct { + InvocationOptions + TargetBranch string + CommitMessage string + Diff interface{} + ExpectedHeadSHA string + BaseBranch string + Ephemeral bool + EphemeralBase bool + Author CommitSignature + Committer *CommitSignature +} + +// RestoreCommitOptions configures restore commit. +type RestoreCommitOptions struct { + InvocationOptions + TargetBranch string + TargetCommitSHA string + CommitMessage string + ExpectedHeadSHA string + Author CommitSignature + Committer *CommitSignature +} + +// RestoreCommitResult describes restore commit. 
+type RestoreCommitResult struct { + CommitSHA string + TreeSHA string + TargetBranch string + PackBytes int + RefUpdate RefUpdate +} + +// WebhookValidationOptions controls webhook validation. +type WebhookValidationOptions struct { + MaxAgeSeconds int +} + +// WebhookValidationResult describes signature validation. +type WebhookValidationResult struct { + Valid bool + Error string + Timestamp int64 + EventType string +} + +// WebhookValidation includes parsed payload when available. +type WebhookValidation struct { + WebhookValidationResult + Payload *WebhookEventPayload +} + +// ParsedWebhookSignature represents parsed signature header. +type ParsedWebhookSignature struct { + Timestamp string + Signature string +} + +// WebhookPushEvent describes a push webhook. +type WebhookPushEvent struct { + Type string + Repository WebhookRepository + Ref string + Before string + After string + CustomerID string + PushedAt time.Time + RawPushedAt string +} + +// WebhookRepository describes webhook repo. +type WebhookRepository struct { + ID string + URL string +} + +// WebhookUnknownEvent is a fallback for unknown events. +type WebhookUnknownEvent struct { + Type string + Raw []byte +} + +// WebhookEventPayload represents a validated event. +type WebhookEventPayload struct { + Push *WebhookPushEvent + Unknown *WebhookUnknownEvent +} + +// Repo represents a repository handle. +type Repo struct { + ID string + DefaultBranch string + client *Client +} + +// Client is the main Git Storage client. +type Client struct { + options Options + api *apiFetcher + privateKey *ecdsa.PrivateKey +} + +// GitStorage is an alias for Client to mirror SDK naming. +type GitStorage = Client + +// CodeStorage is an alias for Client. 
+type CodeStorage = Client
diff --git a/packages/git-storage-sdk-go/util.go b/packages/git-storage-sdk-go/util.go
new file mode 100644
index 000000000..871e87777
--- /dev/null
+++ b/packages/git-storage-sdk-go/util.go
@@ -0,0 +1,260 @@
+package storage
+
+import (
+	"encoding/json"
+	"io"
+	"net/http"
+	"strconv"
+	"strings"
+	"time"
+)
+
+// itoa converts an int to its decimal string form (thin strconv wrapper).
+func itoa(value int) string {
+	return strconv.Itoa(value)
+}
+
+// boolToString renders a bool as "true"/"false" for query parameters.
+func boolToString(value bool) string {
+	if value {
+		return "true"
+	}
+	return "false"
+}
+
+// decodeJSON decodes the response body into target. The caller remains
+// responsible for closing resp.Body.
+func decodeJSON(resp *http.Response, target interface{}) error {
+	decoder := json.NewDecoder(resp.Body)
+	return decoder.Decode(target)
+}
+
+// parseTime parses an RFC 3339 timestamp (with or without fractional
+// seconds), returning the zero time for empty or unparseable input.
+func parseTime(value string) time.Time {
+	if value == "" {
+		return time.Time{}
+	}
+	if parsed, err := time.Parse(time.RFC3339Nano, value); err == nil {
+		return parsed
+	}
+	// NOTE(review): time.Parse with RFC3339Nano already accepts inputs
+	// without fractional seconds, so this branch is likely redundant.
+	if parsed, err := time.Parse(time.RFC3339, value); err == nil {
+		return parsed
+	}
+	return time.Time{}
+}
+
+// normalizeDiffState maps a raw git status string to a DiffFileState by its
+// first character (case-insensitive), e.g. "R100" -> renamed.
+func normalizeDiffState(raw string) DiffFileState {
+	trimmed := strings.TrimSpace(raw)
+	if trimmed == "" {
+		return DiffStateUnknown
+	}
+	leading := strings.ToUpper(trimmed[:1])
+	switch leading {
+	case "A":
+		return DiffStateAdded
+	case "M":
+		return DiffStateModified
+	case "D":
+		return DiffStateDeleted
+	case "R":
+		return DiffStateRenamed
+	case "C":
+		return DiffStateCopied
+	case "T":
+		return DiffStateTypeChanged
+	case "U":
+		return DiffStateUnmerged
+	default:
+		return DiffStateUnknown
+	}
+}
+
+// transformBranchDiff converts the raw branch-diff wire payload into the
+// public GetBranchDiffResult, normalizing diff states and trimming old paths.
+func transformBranchDiff(raw branchDiffResponse) GetBranchDiffResult {
+	result := GetBranchDiffResult{
+		Branch: raw.Branch,
+		Base:   raw.Base,
+		Stats: DiffStats{
+			Files:     raw.Stats.Files,
+			Additions: raw.Stats.Additions,
+			Deletions: raw.Stats.Deletions,
+			Changes:   raw.Stats.Changes,
+		},
+	}
+
+	for _, file := range raw.Files {
+		result.Files = append(result.Files, FileDiff{
+			Path:     file.Path,
+			State:    normalizeDiffState(file.State),
+			RawState: file.State,
+			OldPath:  strings.TrimSpace(file.OldPath),
+			Raw:      file.Raw,
+			Bytes:    file.Bytes,
+			IsEOF:    file.IsEOF,
+		})
+	}
+
+	for _, file := range raw.FilteredFiles {
+		result.FilteredFiles = append(result.FilteredFiles, FilteredFile{
+			Path:     file.Path,
+			State:    normalizeDiffState(file.State),
+			RawState: file.State,
+			OldPath:  strings.TrimSpace(file.OldPath),
+			Bytes:    file.Bytes,
+			IsEOF:    file.IsEOF,
+		})
+	}
+
+	return result
+}
+
+// transformCommitDiff converts the raw commit-diff wire payload into the
+// public GetCommitDiffResult (same normalization as transformBranchDiff).
+func transformCommitDiff(raw commitDiffResponse) GetCommitDiffResult {
+	result := GetCommitDiffResult{
+		SHA: raw.SHA,
+		Stats: DiffStats{
+			Files:     raw.Stats.Files,
+			Additions: raw.Stats.Additions,
+			Deletions: raw.Stats.Deletions,
+			Changes:   raw.Stats.Changes,
+		},
+	}
+
+	for _, file := range raw.Files {
+		result.Files = append(result.Files, FileDiff{
+			Path:     file.Path,
+			State:    normalizeDiffState(file.State),
+			RawState: file.State,
+			OldPath:  strings.TrimSpace(file.OldPath),
+			Raw:      file.Raw,
+			Bytes:    file.Bytes,
+			IsEOF:    file.IsEOF,
+		})
+	}
+
+	for _, file := range raw.FilteredFiles {
+		result.FilteredFiles = append(result.FilteredFiles, FilteredFile{
+			Path:     file.Path,
+			State:    normalizeDiffState(file.State),
+			RawState: file.State,
+			OldPath:  strings.TrimSpace(file.OldPath),
+			Bytes:    file.Bytes,
+			IsEOF:    file.IsEOF,
+		})
+	}
+
+	return result
+}
+
+// parseNoteWriteResponse decodes a note write response. It accepts a JSON
+// success payload (identified by a non-empty "sha"), then a JSON error
+// envelope, and finally falls back to the raw body text as the error
+// message. Reads and consumes resp.Body.
+func parseNoteWriteResponse(resp *http.Response, method string) (NoteWriteResult, error) {
+	contentType := resp.Header.Get("content-type")
+	var rawBody []byte
+	var err error
+
+	if resp.Body != nil {
+		rawBody, err = io.ReadAll(resp.Body)
+		if err != nil {
+			return NoteWriteResult{}, err
+		}
+	}
+
+	if strings.Contains(contentType, "application/json") && len(rawBody) > 0 {
+		var payload noteWriteResponse
+		if err := json.Unmarshal(rawBody, &payload); err == nil && payload.SHA != "" {
+			return NoteWriteResult{
+				SHA:        payload.SHA,
+				TargetRef:  payload.TargetRef,
+				BaseCommit: payload.BaseCommit,
+				NewRefSHA:  payload.NewRefSHA,
+				Result: NoteResult{
+					Success: payload.Result.Success,
+					Status:  payload.Result.Status,
+					Message: payload.Result.Message,
+				},
+			}, nil
+		}
+
+		var env errorEnvelope
+		if err := json.Unmarshal(rawBody, &env); err == nil && strings.TrimSpace(env.Error) != "" {
+			return NoteWriteResult{}, &APIError{
+				Message:    strings.TrimSpace(env.Error),
+				Status:     resp.StatusCode,
+				StatusText: resp.Status,
+				Method:     method,
+				URL:        resp.Request.URL.String(),
+				Body:       env,
+			}
+		}
+	}
+
+	// Fallback error: prefer the raw body text when present, otherwise a
+	// generic "Request <method> <url> failed" message.
+	fallback := "Request " + method + " " + resp.Request.URL.String() + " failed with status " + strconv.Itoa(resp.StatusCode) + " " + resp.Status
+	if len(rawBody) > 0 {
+		text := strings.TrimSpace(string(rawBody))
+		if text != "" {
+			fallback = text
+		}
+	}
+
+	return NoteWriteResult{}, &APIError{
+		Message:    fallback,
+		Status:     resp.StatusCode,
+		StatusText: resp.Status,
+		Method:     method,
+		URL:        resp.Request.URL.String(),
+		Body:       string(rawBody),
+	}
+}
+
+// restoreCommitFailure carries a parsed restore-commit failure payload.
+type restoreCommitFailure struct {
+	Status    string
+	Message   string
+	RefUpdate *RefUpdate
+}
+
+// parseRestoreCommitPayload decodes a restore-commit body into either a
+// success ack or a failure. Returns (nil, nil) when the body parses as JSON
+// into neither shape — NOTE(review): callers must handle that ambiguous case.
+func parseRestoreCommitPayload(body []byte) (*restoreCommitAck, *restoreCommitFailure) {
+	var ack restoreCommitAck
+	if err := json.Unmarshal(body, &ack); err == nil {
+		if ack.Result.Success {
+			return &ack, nil
+		}
+	}
+
+	var failure restoreCommitResponse
+	if err := json.Unmarshal(body, &failure); err == nil {
+		return nil, &restoreCommitFailure{
+			Status:    strings.TrimSpace(failure.Result.Status),
+			Message:   strings.TrimSpace(failure.Result.Message),
+			RefUpdate: partialRefUpdate(failure.Result.Branch, failure.Result.OldSHA, failure.Result.NewSHA),
+		}
+	}
+
+	return nil, nil
+}
+
+// buildRestoreCommitResult converts a restore-commit ack into the public
+// result, or into a ref-update error when the ack reports failure.
+func buildRestoreCommitResult(ack restoreCommitAck) (RestoreCommitResult, error) {
+	refUpdate := RefUpdate{
+		Branch: ack.Result.Branch,
+		OldSHA: ack.Result.OldSHA,
+		NewSHA: ack.Result.NewSHA,
+	}
+
+	if !ack.Result.Success {
+		message := ack.Result.Message
+		if strings.TrimSpace(message) == "" {
+			message = "Restore commit failed with status " + ack.Result.Status
+		}
+		return RestoreCommitResult{}, newRefUpdateError(message, ack.Result.Status, &refUpdate)
+	}
+
+	return RestoreCommitResult{
+		CommitSHA:    ack.Commit.CommitSHA,
+		TreeSHA:      ack.Commit.TreeSHA,
+		TargetBranch: ack.Commit.TargetBranch,
+		PackBytes:    ack.Commit.PackBytes,
+		RefUpdate:    refUpdate,
+	}, nil
+}
+
+// httpStatusToRestoreStatus maps HTTP status codes to restore-commit status
+// strings; unmapped codes pass through as their decimal form.
+func httpStatusToRestoreStatus(status int) string {
+	switch status {
+	case 409:
+		return "conflict"
+	case 412:
+		return "precondition_failed"
+	default:
+		return strconv.Itoa(status)
+	}
+}
diff --git a/packages/git-storage-sdk-go/version.go b/packages/git-storage-sdk-go/version.go
new file mode 100644
index 000000000..ae507cb71
--- /dev/null
+++ b/packages/git-storage-sdk-go/version.go
@@ -0,0 +1,8 @@
+package storage
+
+// PackageName identifies this SDK in the User-Agent header.
+const PackageName = "code-storage-go-sdk"
+
+// PackageVersion is the SDK version reported in the User-Agent header.
+const PackageVersion = "0.0.0"
+
+// userAgent builds the "<name>/<version>" User-Agent string.
+func userAgent() string {
+	return PackageName + "/" + PackageVersion
+}
diff --git a/packages/git-storage-sdk-go/webhook.go b/packages/git-storage-sdk-go/webhook.go
new file mode 100644
index 000000000..75fa139fb
--- /dev/null
+++ b/packages/git-storage-sdk-go/webhook.go
@@ -0,0 +1,171 @@
+package storage
+
+import (
+	"crypto/hmac"
+	"crypto/sha256"
+	"encoding/hex"
+	"encoding/json"
+	"errors"
+	"net/http"
+	"strconv"
+	"strings"
+	"time"
+)
+
+// defaultWebhookMaxAgeSeconds is the default tolerance for webhook
+// timestamp validation (5 minutes).
+const defaultWebhookMaxAgeSeconds = 300
+
+// ParseSignatureHeader parses the X-Pierre-Signature header. The expected
+// format is comma-separated "k=v" pairs containing "t=<timestamp>" and
+// "sha256=<hex signature>"; it returns nil if either part is missing.
+func ParseSignatureHeader(header string) *ParsedWebhookSignature {
+	header = strings.TrimSpace(header)
+	if header == "" {
+		return nil
+	}
+
+	var timestamp string
+	var signature string
+
+	parts := strings.Split(header, ",")
+	for _, part := range parts {
+		kv := strings.SplitN(strings.TrimSpace(part), "=", 2)
+		if len(kv) != 2 {
+			continue
+		}
+		switch kv[0] {
+		case "t":
+			timestamp = kv[1]
+		case "sha256":
+			signature = kv[1]
+		}
+	}
+
+	if timestamp == "" || signature == "" {
+		return nil
+	}
+
+	return &ParsedWebhookSignature{Timestamp: timestamp, Signature: signature}
+}
+
+// ValidateWebhookSignature validates the HMAC signature and timestamp.
+func ValidateWebhookSignature(payload []byte, signatureHeader string, secret string, options WebhookValidationOptions) WebhookValidationResult {
+	// Reject empty secrets outright; an empty key would make the HMAC trivially forgeable.
+	if strings.TrimSpace(secret) == "" {
+		return WebhookValidationResult{Valid: false, Error: "Empty secret is not allowed"}
+	}
+
+	sig := ParseSignatureHeader(signatureHeader)
+	if sig == nil {
+		return WebhookValidationResult{Valid: false, Error: "Invalid signature header format"}
+	}
+
+	ts, err := strconv.ParseInt(sig.Timestamp, 10, 64)
+	if err != nil {
+		return WebhookValidationResult{Valid: false, Error: "Invalid timestamp in signature"}
+	}
+
+	// A zero MaxAgeSeconds selects the default window; a negative value disables the age check.
+	window := options.MaxAgeSeconds
+	if window == 0 {
+		window = defaultWebhookMaxAgeSeconds
+	}
+	if window > 0 {
+		age := time.Now().Unix() - ts
+		switch {
+		case age > int64(window):
+			return WebhookValidationResult{Valid: false, Error: "Webhook timestamp too old (" + strconv.FormatInt(age, 10) + " seconds)", Timestamp: ts}
+		case age < -60:
+			// Tolerate up to 60 seconds of clock skew before rejecting future timestamps.
+			return WebhookValidationResult{Valid: false, Error: "Webhook timestamp is in the future", Timestamp: ts}
+		}
+	}
+
+	// The signed message is "<timestamp>.<raw payload>", HMAC-SHA256 under the shared secret.
+	h := hmac.New(sha256.New, []byte(secret))
+	_, _ = h.Write([]byte(sig.Timestamp + "." + string(payload)))
+	want := h.Sum(nil)
+
+	got, err := hex.DecodeString(sig.Signature)
+	if err != nil {
+		return WebhookValidationResult{Valid: false, Error: "Invalid signature", Timestamp: ts}
+	}
+	// hmac.Equal already compares in constant time; the explicit length guard mirrors the
+	// original behavior of rejecting digests of the wrong size before comparing.
+	if len(want) != len(got) || !hmac.Equal(want, got) {
+		return WebhookValidationResult{Valid: false, Error: "Invalid signature", Timestamp: ts}
+	}
+
+	return WebhookValidationResult{Valid: true, Timestamp: ts}
+}
+
+// ValidateWebhook validates the webhook signature and parses the payload.
+func ValidateWebhook(payload []byte, headers http.Header, secret string, options WebhookValidationOptions) WebhookValidation { + signatureHeader := headers.Get("X-Pierre-Signature") + if signatureHeader == "" { + signatureHeader = headers.Get("x-pierre-signature") + } + if signatureHeader == "" { + return WebhookValidation{WebhookValidationResult: WebhookValidationResult{Valid: false, Error: "Missing or invalid X-Pierre-Signature header"}} + } + + eventType := headers.Get("X-Pierre-Event") + if eventType == "" { + eventType = headers.Get("x-pierre-event") + } + if eventType == "" { + return WebhookValidation{WebhookValidationResult: WebhookValidationResult{Valid: false, Error: "Missing or invalid X-Pierre-Event header"}} + } + + validation := ValidateWebhookSignature(payload, signatureHeader, secret, options) + if !validation.Valid { + return WebhookValidation{WebhookValidationResult: validation} + } + + validation.EventType = eventType + + var raw json.RawMessage + if err := json.Unmarshal(payload, &raw); err != nil { + validation.Valid = false + validation.Error = "Invalid JSON payload" + return WebhookValidation{WebhookValidationResult: validation} + } + + converted, err := convertWebhookPayload(eventType, payload) + if err != nil { + validation.Valid = false + validation.Error = err.Error() + return WebhookValidation{WebhookValidationResult: validation} + } + + return WebhookValidation{WebhookValidationResult: validation, Payload: &converted} +} + +type rawWebhookPushEvent struct { + Repository struct { + ID string `json:"id"` + URL string `json:"url"` + } `json:"repository"` + Ref string `json:"ref"` + Before string `json:"before"` + After string `json:"after"` + CustomerID string `json:"customer_id"` + PushedAt string `json:"pushed_at"` +} + +func convertWebhookPayload(eventType string, payload []byte) (WebhookEventPayload, error) { + if eventType == "push" { + var raw rawWebhookPushEvent + if err := json.Unmarshal(payload, &raw); err != nil { + return 
WebhookEventPayload{}, err + } + if raw.Repository.ID == "" || raw.Repository.URL == "" || raw.Ref == "" || raw.Before == "" || raw.After == "" || raw.CustomerID == "" || raw.PushedAt == "" { + return WebhookEventPayload{}, errors.New("Invalid push payload") + } + return WebhookEventPayload{Push: &WebhookPushEvent{ + Type: "push", + Repository: WebhookRepository{ID: raw.Repository.ID, URL: raw.Repository.URL}, + Ref: raw.Ref, + Before: raw.Before, + After: raw.After, + CustomerID: raw.CustomerID, + PushedAt: parseTime(raw.PushedAt), + RawPushedAt: raw.PushedAt, + }}, nil + } + + return WebhookEventPayload{Unknown: &WebhookUnknownEvent{Type: eventType, Raw: payload}}, nil +} diff --git a/packages/git-storage-sdk-node/.gitignore b/packages/git-storage-sdk-node/.gitignore new file mode 100644 index 000000000..8ca49a2e8 --- /dev/null +++ b/packages/git-storage-sdk-node/.gitignore @@ -0,0 +1,4 @@ +dist/ +node_modules/ +*.log +.DS_Store \ No newline at end of file diff --git a/packages/git-storage-sdk-node/AGENTS.md b/packages/git-storage-sdk-node/AGENTS.md new file mode 100644 index 000000000..30f91cad2 --- /dev/null +++ b/packages/git-storage-sdk-node/AGENTS.md @@ -0,0 +1,68 @@ +# Git Storage SDK – Agent Notes + +This package (`@pierre/storage`) is published publicly and ships to external customers. Treat the +repository as production-critical: follow semver expectations, avoid breaking changes without +coordination, and keep documentation in sync with code. + +## Package Purpose + +- TypeScript/ESM + CommonJS SDK for Pierre’s Git storage APIs. +- Generates authenticated Git remote URLs and wraps REST endpoints for repo management, diff + retrieval, commit packs, and branch restore operations. +- Distributed via npm; consumers rely on the generated `dist` output. + +## Build & Test + +- Build: `pnpm --filter @pierre/storage build` (tsup with `tsconfig.tsup.json`). +- Tests: `pnpm --filter @pierre/storage exec vitest --run`. 
+- Full end-to-end smoke test hitting Pierre environments:
+  `node packages/git-storage-sdk-node/tests/full-workflow.js -e production -s pierre -k /home/ian/pierre-prod-key.pem`
+  (change the `-e`, `-s`, and key path for your setup). The script provisions a repo, commits
+  changes, and exercises diff/list APIs via the SDK.
+
+## Key Files
+
+- `src/index.ts`: Public entry point (Repo/GitStorage classes).
+- `src/types.ts`: Shared type definitions; keep in sync with gateway JSON.
+- `tests/index.test.ts`: Unit coverage for API surface (uses mocked fetch).
+- `tests/full-workflow.js`: Live workflow script exercising real services.
+- `tsup.config.ts`: Declares build-time constants (API/STORAGE base URLs).
+
+## Development Notes
+
+- `resolveCommitTtlSeconds` default TTL = 1 hour unless overridden.
+- Use `DEFAULT_TOKEN_TTL_SECONDS` for 1-hour defaults (avoid hard-coded `1 * 60 * 60`).
+- `Repo.restoreCommit` streams metadata to `repos/restore-commit`. Legacy `restore-commits` and
+  `reset-commits` endpoints remain deployed but the SDK no longer auto-falls back; callers must hit
+  those routes explicitly if needed.
+- Commit builder (`createCommit().send()`) and `Repo.restoreCommit` throw `RefUpdateError` when the
+  backend rejects a ref update; keep status/reason/message/ref mapping intact.
+- `Repo.createCommitFromDiff` streams pre-generated patches to `repos/diff-commit` (accepts the diff
+  payload directly and returns a `Promise<CommitResult>`). It shares the same `RefUpdateError`
+  semantics—reuse the commit-pack helpers when adjusting error handling.
+- Authentication relies on ES256 private key; see README for sample key.
+- When adjusting request/response shapes, reflect changes in both TypeScript types and README.
+- Avoid importing Node built-ins that break browser usage; the SDK is intended for Node + edge
+  runtimes with fetch available.
+- Maintain the Result vs Response distinction: raw API payloads remain `*Response` while SDK + consumers receive camelCase `*Result` objects. Update both transformer utilities and docs + together. +- Diff responses normalize the Git status via `normalizeDiffState`, exposing both `state` and + `rawState`; extend that mapping instead of passing raw enums through directly. +- Webhook validation returns typed push events (parsed `Date`, camelCase fields) or a + `WebhookUnknownEvent` fallback—keep this discriminated union intact when adding new events. +- Commit and commit-list APIs convert timestamps to `Date` while preserving `rawDate`; apply the + same pattern to future time fields. +- Commit builder accepts `Blob`, `File`, `ReadableStream`, and iterable sources; new sources should + be funneled through `toAsyncIterable`/`ensureUint8Array`. +- `CommitFileOptions.mode` is restricted to `GitFileMode` literals; ensure additional modes are + codified there. +- `CommitTextFileOptions.encoding` supports Node `Buffer` encodings and defaults to UTF-8; retain + the Buffer-based fallback for non-UTF encodings. + +## Release Checklist + +- Ensure `pnpm test` and `build` succeed. +- Update version in `package.json` when shipping changes. +- Verify README snippets remain accurate. +- Communicate breaking changes to customer-facing channels. diff --git a/packages/git-storage-sdk-node/CLAUDE.md b/packages/git-storage-sdk-node/CLAUDE.md new file mode 120000 index 000000000..47dc3e3d8 --- /dev/null +++ b/packages/git-storage-sdk-node/CLAUDE.md @@ -0,0 +1 @@ +AGENTS.md \ No newline at end of file diff --git a/packages/git-storage-sdk-node/README.md b/packages/git-storage-sdk-node/README.md new file mode 100644 index 000000000..76eb3774d --- /dev/null +++ b/packages/git-storage-sdk-node/README.md @@ -0,0 +1,676 @@ +# @pierre/storage + +Pierre Git Storage SDK for TypeScript/JavaScript applications. 
+ +## End-to-End Smoke Test + +- `node packages/git-storage-sdk/tests/full-workflow.js -e production -s pierre -k /home/ian/pierre-prod-key.pem` + Drives + the Pierre workflow via the SDK: creates a repository, writes commits, fetches branch and diff + data, and confirms storage APIs. Swap in your own private key path when running outside this + workstation and adjust `-e`/`-s` for non-production environments. + +## Installation + +```bash +npm install @pierre/storage +``` + +## Usage + +### Basic Setup + +```typescript +import { GitStorage } from '@pierre/storage'; + +// Initialize the client with your name and key +const store = new GitStorage({ + name: 'your-name', // e.g., 'v0' + key: 'your-key', // Your API key +}); +``` + +### Creating a Repository + +```typescript +// Create a new repository with auto-generated ID +const repo = await store.createRepo(); +console.log(repo.id); // e.g., '123e4567-e89b-12d3-a456-426614174000' + +// Create a repository with custom ID +const customRepo = await store.createRepo({ id: 'my-custom-repo' }); +console.log(customRepo.id); // 'my-custom-repo' + +// Create a repository by forking an existing repo +const forkedRepo = await store.createRepo({ + id: 'my-fork', // optional + baseRepo: { + id: 'my-template-id', + ref: 'main', // optional + }, +}); +// If defaultBranch is omitted, the SDK returns "main". 
+``` + +### Finding a Repository + +```typescript +const foundRepo = await store.findOne({ id: 'repo-id' }); +if (foundRepo) { + const url = await foundRepo.getRemoteURL(); + console.log(`Repository URL: ${url}`); +} +``` + +### Grep + +```typescript +const result = await foundRepo.grep({ + ref: 'main', + query: { pattern: 'TODO', caseSensitive: true }, + paths: ['src/'], +}); +console.log(result.matches); +``` + +### Getting Remote URLs + +The SDK generates secure URLs with JWT authentication for Git operations: + +```typescript +// Get URL with default permissions (git:write, git:read) and 1-year TTL +const url = await repo.getRemoteURL(); +// Returns: https://t:JWT@your-name.code.storage/repo-id.git + +// Configure the Git remote +console.log(`Run: git remote add origin ${url}`); + +// Get URL with custom permissions and TTL +const readOnlyUrl = await repo.getRemoteURL({ + permissions: ['git:read'], // Read-only access + ttl: 3600, // 1 hour in seconds +}); + +// Available permissions: +// - 'git:read' - Read access to Git repository +// - 'git:write' - Write access to Git repository +// - 'repo:write' - Create a repository +``` + +#### Ephemeral Branches + +For working with ephemeral branches (temporary branches isolated from the main repository), use +`getEphemeralRemote()`: + +```typescript +// Get ephemeral namespace remote URL +const ephemeralUrl = await repo.getEphemeralRemoteURL(); +// Returns: https://t:JWT@your-name.code.storage/repo-id+ephemeral.git + +// Configure separate remotes for default and ephemeral branches +console.log(`Run: git remote add origin ${await repo.getRemoteURL()}`); +console.log(`Run: git remote add ephemeral ${await repo.getEphemeralRemoteURL()}`); + +// Push ephemeral branch +// git push ephemeral feature-branch + +// The ephemeral remote supports all the same options and permission as regular remotes +``` + +### Working with Repository Content + +Once you have a repository instance, you can perform various Git operations: + 
+```typescript +const repo = await store.createRepo(); +// or +const repo = await store.findOne({ id: 'existing-repo-id' }); + +// List repositories for the org +const repos = await store.listRepos({ limit: 20 }); +console.log(repos.repos); + +// Get file content (streaming) +const resp = await repo.getFileStream({ + path: 'README.md', + ref: 'main', // optional, defaults to default branch +}); +const text = await resp.text(); +console.log(text); + +// List all files in the repository +const files = await repo.listFiles({ + ref: 'main', // optional, defaults to default branch +}); +console.log(files.paths); // Array of file paths + +// List branches +const branches = await repo.listBranches({ + limit: 10, + cursor: undefined, // for pagination +}); +console.log(branches.branches); + +// List commits +const commits = await repo.listCommits({ + branch: 'main', // optional + limit: 20, + cursor: undefined, // for pagination +}); +console.log(commits.commits); + +// Read a git note for a commit +const note = await repo.getNote({ sha: 'abc123...' }); +console.log(note.note); + +// Add a git note +const noteResult = await repo.createNote({ + sha: 'abc123...', + note: 'Release QA approved', + author: { name: 'Release Bot', email: 'release@example.com' }, +}); +console.log(noteResult.newRefSha); + +// Append to an existing git note +await repo.appendNote({ + sha: 'abc123...', + note: 'Follow-up review complete', +}); + +// Delete a git note +await repo.deleteNote({ sha: 'abc123...' 
}); + +// Get branch diff +const branchDiff = await repo.getBranchDiff({ + branch: 'feature-branch', + base: 'main', // optional, defaults to main +}); +console.log(branchDiff.stats); +console.log(branchDiff.files); + +// Get commit diff +const commitDiff = await repo.getCommitDiff({ + sha: 'abc123...', +}); +console.log(commitDiff.stats); +console.log(commitDiff.files); + +// Create a new branch from an existing one +const branch = await repo.createBranch({ + baseBranch: 'main', + targetBranch: 'feature/demo', + // baseIsEphemeral: true, + // targetIsEphemeral: true, +}); +console.log(branch.targetBranch, branch.commitSha); + +// Create a commit using the streaming helper +const fs = await import('node:fs/promises'); +const result = await repo + .createCommit({ + targetBranch: 'main', + commitMessage: 'Update docs', + author: { name: 'Docs Bot', email: 'docs@example.com' }, + }) + .addFileFromString('docs/changelog.md', '# v2.0.2\n- add streaming SDK\n') + .addFile('docs/readme.md', await fs.readFile('README.md')) + .deletePath('docs/legacy.txt') + .send(); + +console.log(result.commitSha); +console.log(result.refUpdate.newSha); +console.log(result.refUpdate.oldSha); // All zeroes when the ref is created +``` + +The builder exposes: + +- `addFile(path, source, options)` to attach bytes from strings, typed arrays, ArrayBuffers, `Blob` + or `File` objects, `ReadableStream`s, or iterable/async-iterable sources. +- `addFileFromString(path, contents, options)` for text helpers (defaults to UTF-8 and accepts any + Node.js `BufferEncoding`). +- `deletePath(path)` to remove files or folders. +- `send()` to finalize the commit and receive metadata about the new commit. 
+ +`send()` resolves to an object with the new commit metadata plus the ref update: + +```ts +type CommitResult = { + commitSha: string; + treeSha: string; + targetBranch: string; + packBytes: number; + blobCount: number; + refUpdate: { + branch: string; + oldSha: string; // All zeroes when the ref is created + newSha: string; + }; +}; +``` + +If the backend reports a failure (for example, the branch advanced past `expectedHeadSha`) the +builder throws a `RefUpdateError` containing the status, reason, and ref details. + +**Options** + +- `targetBranch` (required): Branch name (for example `main`) that will receive the commit. +- `expectedHeadSha` (optional): Commit SHA that must match the remote tip; omit to fast-forward + unconditionally. +- `baseBranch` (optional): Mirrors the `base_branch` metadata and names an existing branch whose tip + should seed `targetBranch` if it does not exist. Leave `expectedHeadSha` empty when creating a new + branch from `baseBranch`; when both are provided and the branch already exists, `expectedHeadSha` + continues to enforce the fast-forward guard. +- `commitMessage` (required): The commit message. +- `author` (required): Include `name` and `email` for the commit author. +- `committer` (optional): Include `name` and `email`. If omitted, the author identity is reused. +- `signal` (optional): Abort an in-flight upload with `AbortController`. +- `targetRef` (deprecated, optional): Fully qualified ref (for example `refs/heads/main`). Prefer + `targetBranch`, which now accepts plain branch names. + +> Files are chunked into 4 MiB segments under the hood, so you can stream large assets without +> buffering them entirely in memory. File paths are normalized relative to the repository root. + +> The `targetBranch` must already exist on the remote repository unless you provide `baseBranch` (or +> the repository has no refs). To seed an empty repository, point to the default branch and omit +> `expectedHeadSha`. 
To create a missing branch within an existing repository, set `baseBranch` to +> the source branch and omit `expectedHeadSha` so the service clones that tip before applying your +> changes. + +### Apply a pre-generated diff + +If you already have a patch (for example, the output of `git diff --binary`) you can stream it to +the gateway with a single call. The SDK handles chunking and NDJSON streaming just like it does for +regular commits: + +```ts +const fs = await import('node:fs/promises'); + +const patch = await fs.readFile('build/generated.patch', 'utf8'); + +const diffResult = await repo.createCommitFromDiff({ + targetBranch: 'feature/apply-diff', + expectedHeadSha: 'abc123def4567890abc123def4567890abc12345', + commitMessage: 'Apply generated API changes', + author: { name: 'Diff Bot', email: 'diff@example.com' }, + diff: patch, +}); + +console.log(diffResult.commitSha); +console.log(diffResult.refUpdate.newSha); +``` + +The `diff` field accepts a `string`, `Uint8Array`, `ArrayBuffer`, `Blob`, `File`, `ReadableStream`, +iterable, or async iterable of byte chunks—the same sources supported by the standard commit +builder. `createCommitFromDiff` returns a `Promise` and throws a `RefUpdateError` when +the server rejects the diff (for example, if the branch tip changed). 
+ +### Streaming Large Files + +The commit builder accepts any async iterable of bytes, so you can stream large assets without +buffering: + +```typescript +import { createReadStream } from 'node:fs'; + +await repo + .createCommit({ + targetBranch: 'assets', + expectedHeadSha: 'abc123def4567890abc123def4567890abc12345', + commitMessage: 'Upload latest design bundle', + author: { name: 'Assets Uploader', email: 'assets@example.com' }, + }) + .addFile('assets/design-kit.zip', createReadStream('/tmp/design-kit.zip')) + .send(); +``` + +## API Reference + +### GitStorage + +```typescript +class GitStorage { + constructor(options: GitStorageOptions); + async createRepo(options?: CreateRepoOptions): Promise; + async findOne(options: FindOneOptions): Promise; + getConfig(): GitStorageOptions; +} +``` + +### Interfaces + +```typescript +interface GitStorageOptions { + name: string; // Your identifier + key: string; // Your API key + defaultTTL?: number; // Default TTL for generated JWTs (seconds) +} + +interface CreateRepoOptions { + id?: string; // Optional custom repository ID + baseRepo?: + | { + id: string; // Fork source repo ID + ref?: string; // Optional ref to fork from + sha?: string; // Optional commit SHA to fork from + } + | { + owner: string; // GitHub owner + name: string; // GitHub repository name + defaultBranch?: string; + provider?: 'github'; + }; + defaultBranch?: string; // Optional default branch name (defaults to "main") +} + +interface FindOneOptions { + id: string; // Repository ID to find +} + +interface Repo { + id: string; + getRemoteURL(options?: GetRemoteURLOptions): Promise; + getEphemeralRemoteURL(options?: GetRemoteURLOptions): Promise; + + getFileStream(options: GetFileOptions): Promise; + listFiles(options?: ListFilesOptions): Promise; + listBranches(options?: ListBranchesOptions): Promise; + listCommits(options?: ListCommitsOptions): Promise; + getNote(options: GetNoteOptions): Promise; + createNote(options: CreateNoteOptions): Promise; + 
appendNote(options: AppendNoteOptions): Promise; + deleteNote(options: DeleteNoteOptions): Promise; + getBranchDiff(options: GetBranchDiffOptions): Promise; + getCommitDiff(options: GetCommitDiffOptions): Promise; +} + +interface GetRemoteURLOptions { + permissions?: ('git:write' | 'git:read' | 'repo:write')[]; + ttl?: number; // Time to live in seconds (default: 31536000 = 1 year) +} + +// Git operation interfaces +interface GetFileOptions { + path: string; + ref?: string; // Branch, tag, or commit SHA + ttl?: number; +} + +// getFileStream() returns a standard Fetch Response for streaming bytes + +interface ListFilesOptions { + ref?: string; // Branch, tag, or commit SHA + ttl?: number; +} + +interface ListFilesResponse { + paths: string[]; // Array of file paths + ref: string; // The resolved reference +} + +interface ListFilesResult { + paths: string[]; + ref: string; +} + +interface GetNoteOptions { + sha: string; // Commit SHA to look up notes for + ttl?: number; +} + +interface GetNoteResult { + sha: string; + note: string; + refSha: string; +} + +interface CreateNoteOptions { + sha: string; + note: string; + expectedRefSha?: string; + author?: { name: string; email: string }; + ttl?: number; +} + +interface AppendNoteOptions { + sha: string; + note: string; + expectedRefSha?: string; + author?: { name: string; email: string }; + ttl?: number; +} + +interface DeleteNoteOptions { + sha: string; + expectedRefSha?: string; + author?: { name: string; email: string }; + ttl?: number; +} + +interface NoteWriteResult { + sha: string; + targetRef: string; + baseCommit?: string; + newRefSha: string; + result: { + success: boolean; + status: string; + message?: string; + }; +} + +interface ListBranchesOptions { + cursor?: string; + limit?: number; + ttl?: number; +} + +interface ListBranchesResponse { + branches: BranchInfo[]; + nextCursor?: string; + hasMore: boolean; +} + +interface ListBranchesResult { + branches: BranchInfo[]; + nextCursor?: string; + hasMore: 
boolean; +} + +interface BranchInfo { + cursor: string; + name: string; + headSha: string; + createdAt: string; +} + +interface ListCommitsOptions { + branch?: string; + cursor?: string; + limit?: number; + ttl?: number; +} + +interface ListCommitsResponse { + commits: CommitInfo[]; + nextCursor?: string; + hasMore: boolean; +} + +interface ListCommitsResult { + commits: CommitInfo[]; + nextCursor?: string; + hasMore: boolean; +} + +interface CommitInfo { + sha: string; + message: string; + authorName: string; + authorEmail: string; + committerName: string; + committerEmail: string; + date: Date; + rawDate: string; +} + +interface GetBranchDiffOptions { + branch: string; + base?: string; // Defaults to 'main' + ttl?: number; + ephemeral?: boolean; + ephemeralBase?: boolean; +} + +interface GetCommitDiffOptions { + sha: string; + ttl?: number; +} + +interface GetBranchDiffResponse { + branch: string; + base: string; + stats: DiffStats; + files: FileDiff[]; + filteredFiles: FilteredFile[]; +} + +interface GetBranchDiffResult { + branch: string; + base: string; + stats: DiffStats; + files: FileDiff[]; + filteredFiles: FilteredFile[]; +} + +interface GetCommitDiffResponse { + sha: string; + stats: DiffStats; + files: FileDiff[]; + filteredFiles: FilteredFile[]; +} + +interface GetCommitDiffResult { + sha: string; + stats: DiffStats; + files: FileDiff[]; + filteredFiles: FilteredFile[]; +} +interface DiffStats { + files: number; + additions: number; + deletions: number; + changes: number; +} + +type DiffFileState = + | 'added' + | 'modified' + | 'deleted' + | 'renamed' + | 'copied' + | 'type_changed' + | 'unmerged' + | 'unknown'; + +interface DiffFileBase { + path: string; + state: DiffFileState; + rawState: string; + oldPath?: string; + bytes: number; + isEof: boolean; +} + +interface FileDiff { + path: string; + state: DiffFileState; + rawState: string; + oldPath?: string; + bytes: number; + isEof: boolean; + raw: string; +} + +interface FilteredFile { + path: string; 
+ state: DiffFileState; + rawState: string; + oldPath?: string; + bytes: number; + isEof: boolean; +} + +interface RefUpdate { + branch: string; + oldSha: string; + newSha: string; +} + +interface CommitResult { + commitSha: string; + treeSha: string; + targetBranch: string; + packBytes: number; + blobCount: number; + refUpdate: RefUpdate; +} + +interface RestoreCommitOptions { + targetBranch: string; + targetCommitSha: string; + commitMessage?: string; + expectedHeadSha?: string; + author: CommitSignature; + committer?: CommitSignature; +} + +interface RestoreCommitResult { + commitSha: string; + treeSha: string; + targetBranch: string; + packBytes: number; + refUpdate: { + branch: string; + oldSha: string; + newSha: string; + }; +} +``` + +## Authentication + +The SDK uses JWT (JSON Web Tokens) for authentication. When you call `getRemoteURL()`, it: + +1. Creates a JWT with your name, repository ID, and requested permissions +2. Signs it with your key +3. Embeds it in the Git remote URL as the password + +The generated URLs are compatible with standard Git clients and include all necessary +authentication. + +## Error Handling + +The SDK validates inputs and provides helpful error messages: + +```typescript +try { + const store = new GitStorage({ name: '', key: 'key' }); +} catch (error) { + // Error: GitStorage name must be a non-empty string. +} + +try { + const store = new GitStorage({ name: 'v0', key: '' }); +} catch (error) { + // Error: GitStorage key must be a non-empty string. +} +- +``` + +- Mutating operations (commit builder, `restoreCommit`) throw `RefUpdateError` when the backend + reports a ref failure. Inspect `error.status`, `error.reason`, `error.message`, and + `error.refUpdate` for details. 
+ +## License + +MIT diff --git a/packages/git-storage-sdk-node/moon.yml b/packages/git-storage-sdk-node/moon.yml new file mode 100644 index 000000000..e36740b70 --- /dev/null +++ b/packages/git-storage-sdk-node/moon.yml @@ -0,0 +1,44 @@ +type: 'library' +language: 'typescript' + +project: + name: 'git-storage-sdk-node' + description: 'Pierre Git Storage SDK for external npm publishing' + +fileGroups: + sourceInputs: + - 'src/**/*' + configInputs: + - 'tsup.config.ts' + - 'tsconfig.json' + - 'tsconfig.*.json' + outputs: + - 'dist/**/*' + +tasks: + build: + command: + - 'tsup' + inputs: + - '@globs(configInputs)' + - '@globs(sourceInputs)' + outputs: + - '@globs(outputs)' + options: + runInCI: true + cache: true + mergeArgs: replace + mergeInputs: replace + mergeOutputs: replace + + dev: + command: + - 'tsup' + - '--watch' + inputs: + - '@globs(configInputs)' + - '@globs(sourceInputs)' + options: + cache: false + persistent: true + runInCI: false diff --git a/packages/git-storage-sdk-node/package.json b/packages/git-storage-sdk-node/package.json new file mode 100644 index 000000000..c280e2ce9 --- /dev/null +++ b/packages/git-storage-sdk-node/package.json @@ -0,0 +1,40 @@ +{ + "name": "@pierre/storage", + "version": "0.9.3", + "description": "Pierre Git Storage SDK", + "license": "MIT", + "type": "module", + "main": "./dist/index.cjs", + "module": "./dist/index.js", + "types": "./dist/index.d.ts", + "exports": { + ".": { + "types": "./dist/index.d.ts", + "import": "./dist/index.js", + "require": "./dist/index.cjs", + "default": "./dist/index.js" + } + }, + "files": [ + "dist", + "src" + ], + "scripts": { + "build": "tsup", + "dev": "tsup --watch", + "prepublishOnly": "pnpm build" + }, + "dependencies": { + "jose": "^5.10.0", + "snakecase-keys": "^9.0.2", + "zod": "^3.23.8" + }, + "devDependencies": { + "tsup": "8.5.0", + "typescript": "5.8.3", + "vitest": "3.2.4" + }, + "publishConfig": { + "access": "public" + } +} diff --git 
a/packages/git-storage-sdk-node/src/commit-pack.ts b/packages/git-storage-sdk-node/src/commit-pack.ts new file mode 100644 index 000000000..5576190ed --- /dev/null +++ b/packages/git-storage-sdk-node/src/commit-pack.ts @@ -0,0 +1,128 @@ +import { inferRefUpdateReason, RefUpdateError } from './errors'; +import type { CommitPackAckRaw } from './schemas'; +import { commitPackResponseSchema, errorEnvelopeSchema } from './schemas'; +import type { CommitResult, RefUpdate } from './types'; + +export type CommitPackAck = CommitPackAckRaw; + +export function buildCommitResult(ack: CommitPackAckRaw): CommitResult { + const refUpdate = toRefUpdate(ack.result); + if (!ack.result.success) { + throw new RefUpdateError( + ack.result.message ?? `Commit failed with status ${ack.result.status}`, + { + status: ack.result.status, + message: ack.result.message, + refUpdate, + }, + ); + } + return { + commitSha: ack.commit.commit_sha, + treeSha: ack.commit.tree_sha, + targetBranch: ack.commit.target_branch, + packBytes: ack.commit.pack_bytes, + blobCount: ack.commit.blob_count, + refUpdate, + }; +} + +export function toRefUpdate(result: CommitPackAckRaw['result']): RefUpdate { + return { + branch: result.branch, + oldSha: result.old_sha, + newSha: result.new_sha, + }; +} + +export async function parseCommitPackError( + response: Response, + fallbackMessage: string, +): Promise<{ + statusMessage: string; + statusLabel: string; + refUpdate?: Partial; +}> { + const cloned = response.clone(); + let jsonBody: unknown; + try { + jsonBody = await cloned.json(); + } catch { + jsonBody = undefined; + } + + let textBody: string | undefined; + if (jsonBody === undefined) { + try { + textBody = await response.text(); + } catch { + textBody = undefined; + } + } + + const defaultStatus = (() => { + const inferred = inferRefUpdateReason(String(response.status)); + return inferred === 'unknown' ? 
'failed' : inferred; + })(); + let statusLabel = defaultStatus; + let refUpdate: Partial | undefined; + let message: string | undefined; + + if (jsonBody !== undefined) { + const parsedResponse = commitPackResponseSchema.safeParse(jsonBody); + if (parsedResponse.success) { + const result = parsedResponse.data.result; + if (typeof result.status === 'string' && result.status.trim() !== '') { + statusLabel = result.status.trim() as typeof statusLabel; + } + refUpdate = toPartialRefUpdateFields(result.branch, result.old_sha, result.new_sha); + if (typeof result.message === 'string' && result.message.trim() !== '') { + message = result.message.trim(); + } + } + + if (!message) { + const parsedError = errorEnvelopeSchema.safeParse(jsonBody); + if (parsedError.success) { + const trimmed = parsedError.data.error.trim(); + if (trimmed) { + message = trimmed; + } + } + } + } + + if (!message && typeof jsonBody === 'string' && jsonBody.trim() !== '') { + message = jsonBody.trim(); + } + + if (!message && textBody && textBody.trim() !== '') { + message = textBody.trim(); + } + + return { + statusMessage: message ?? fallbackMessage, + statusLabel, + refUpdate, + }; +} + +function toPartialRefUpdateFields( + branch?: string | null, + oldSha?: string | null, + newSha?: string | null, +): Partial | undefined { + const refUpdate: Partial = {}; + + if (typeof branch === 'string' && branch.trim() !== '') { + refUpdate.branch = branch.trim(); + } + if (typeof oldSha === 'string' && oldSha.trim() !== '') { + refUpdate.oldSha = oldSha.trim(); + } + if (typeof newSha === 'string' && newSha.trim() !== '') { + refUpdate.newSha = newSha.trim(); + } + + return Object.keys(refUpdate).length > 0 ? 
refUpdate : undefined; +} diff --git a/packages/git-storage-sdk-node/src/commit.ts b/packages/git-storage-sdk-node/src/commit.ts new file mode 100644 index 000000000..30ed6ed48 --- /dev/null +++ b/packages/git-storage-sdk-node/src/commit.ts @@ -0,0 +1,434 @@ +import { buildCommitResult, parseCommitPackError } from './commit-pack'; +import { RefUpdateError } from './errors'; +import type { CommitPackAckRaw } from './schemas'; +import { commitPackAckSchema } from './schemas'; +import { + base64Encode, + type ChunkSegment, + chunkify, + requiresDuplex, + toAsyncIterable, + toRequestBody, +} from './stream-utils'; +import type { + CommitBuilder, + CommitFileOptions, + CommitFileSource, + CommitResult, + CommitSignature, + CommitTextFileOptions, + CreateCommitOptions, + LegacyCreateCommitOptions, +} from './types'; +import { getUserAgent } from './version'; + +const DEFAULT_TTL_SECONDS = 60 * 60; +const HEADS_REF_PREFIX = 'refs/heads/'; + +type NodeBuffer = Uint8Array & { toString(encoding?: string): string }; +interface NodeBufferConstructor { + from(data: Uint8Array): NodeBuffer; + from(data: string, encoding?: string): NodeBuffer; + isBuffer(value: unknown): value is NodeBuffer; +} + +const BufferCtor: NodeBufferConstructor | undefined = ( + globalThis as { Buffer?: NodeBufferConstructor } +).Buffer; + +interface CommitMetadataPayload { + target_branch: string; + expected_head_sha?: string; + base_branch?: string; + commit_message: string; + ephemeral?: boolean; + ephemeral_base?: boolean; + author: { + name: string; + email: string; + }; + committer?: { + name: string; + email: string; + }; + files: Array<{ + path: string; + content_id: string; + operation?: 'upsert' | 'delete'; + mode?: string; + }>; +} + +interface CommitTransportRequest { + authorization: string; + signal?: AbortSignal; + metadata: CommitMetadataPayload; + blobs: Array<{ contentId: string; chunks: AsyncIterable }>; +} + +interface CommitTransport { + send(request: CommitTransportRequest): 
Promise; +} + +type NormalizedCommitOptions = { + targetBranch: string; + commitMessage: string; + expectedHeadSha?: string; + baseBranch?: string; + ephemeral?: boolean; + ephemeralBase?: boolean; + author: CommitSignature; + committer?: CommitSignature; + signal?: AbortSignal; + ttl?: number; +}; + +interface CommitBuilderDeps { + options: CreateCommitOptions; + getAuthToken: () => Promise; + transport: CommitTransport; +} + +type FileOperationState = { + path: string; + contentId: string; + mode?: string; + operation: 'upsert' | 'delete'; + streamFactory?: () => AsyncIterable; +}; + +export class CommitBuilderImpl implements CommitBuilder { + private readonly options: NormalizedCommitOptions; + private readonly getAuthToken: () => Promise; + private readonly transport: CommitTransport; + private readonly operations: FileOperationState[] = []; + private sent = false; + + constructor(deps: CommitBuilderDeps) { + this.options = normalizeCommitOptions(deps.options); + this.getAuthToken = deps.getAuthToken; + this.transport = deps.transport; + + const trimmedMessage = this.options.commitMessage?.trim(); + const trimmedAuthorName = this.options.author?.name?.trim(); + const trimmedAuthorEmail = this.options.author?.email?.trim(); + + if (!trimmedMessage) { + throw new Error('createCommit commitMessage is required'); + } + if (!trimmedAuthorName || !trimmedAuthorEmail) { + throw new Error('createCommit author name and email are required'); + } + this.options.commitMessage = trimmedMessage; + this.options.author = { + name: trimmedAuthorName, + email: trimmedAuthorEmail, + }; + if (typeof this.options.expectedHeadSha === 'string') { + this.options.expectedHeadSha = this.options.expectedHeadSha.trim(); + } + if (typeof this.options.baseBranch === 'string') { + const trimmedBase = this.options.baseBranch.trim(); + if (trimmedBase === '') { + delete this.options.baseBranch; + } else { + if (trimmedBase.startsWith('refs/')) { + throw new Error('createCommit baseBranch must 
not include refs/ prefix'); + } + this.options.baseBranch = trimmedBase; + } + } + + if (this.options.ephemeralBase && !this.options.baseBranch) { + throw new Error('createCommit ephemeralBase requires baseBranch'); + } + } + + addFile(path: string, source: CommitFileSource, options?: CommitFileOptions): CommitBuilder { + this.ensureNotSent(); + const normalizedPath = this.normalizePath(path); + const contentId = randomContentId(); + const mode = options?.mode ?? '100644'; + + this.operations.push({ + path: normalizedPath, + contentId, + mode, + operation: 'upsert', + streamFactory: () => toAsyncIterable(source), + }); + + return this; + } + + addFileFromString( + path: string, + contents: string, + options?: CommitTextFileOptions, + ): CommitBuilder { + const encoding = options?.encoding ?? 'utf8'; + const normalizedEncoding = encoding === 'utf-8' ? 'utf8' : encoding; + let data: Uint8Array; + if (normalizedEncoding === 'utf8') { + data = new TextEncoder().encode(contents); + } else if (BufferCtor) { + data = BufferCtor.from( + contents, + normalizedEncoding as Parameters[1], + ); + } else { + throw new Error( + `Unsupported encoding "${encoding}" in this environment. 
Non-UTF encodings require Node.js Buffer support.`, + ); + } + return this.addFile(path, data, options); + } + + deletePath(path: string): CommitBuilder { + this.ensureNotSent(); + const normalizedPath = this.normalizePath(path); + this.operations.push({ + path: normalizedPath, + contentId: randomContentId(), + operation: 'delete', + }); + return this; + } + + async send(): Promise { + this.ensureNotSent(); + this.sent = true; + + const metadata = this.buildMetadata(); + const blobEntries = this.operations + .filter((op) => op.operation === 'upsert' && op.streamFactory) + .map((op) => ({ + contentId: op.contentId, + chunks: chunkify(op.streamFactory!()), + })); + + const authorization = await this.getAuthToken(); + const ack = await this.transport.send({ + authorization, + signal: this.options.signal, + metadata, + blobs: blobEntries, + }); + return buildCommitResult(ack); + } + + private buildMetadata(): CommitMetadataPayload { + const files = this.operations.map((op) => { + const entry: CommitMetadataPayload['files'][number] = { + path: op.path, + content_id: op.contentId, + operation: op.operation, + }; + if (op.mode) { + entry.mode = op.mode; + } + return entry; + }); + + const metadata: CommitMetadataPayload = { + target_branch: this.options.targetBranch, + commit_message: this.options.commitMessage, + author: { + name: this.options.author.name, + email: this.options.author.email, + }, + files, + }; + + if (this.options.expectedHeadSha) { + metadata.expected_head_sha = this.options.expectedHeadSha; + } + if (this.options.baseBranch) { + metadata.base_branch = this.options.baseBranch; + } + if (this.options.committer) { + metadata.committer = { + name: this.options.committer.name, + email: this.options.committer.email, + }; + } + + if (this.options.ephemeral) { + metadata.ephemeral = true; + } + if (this.options.ephemeralBase) { + metadata.ephemeral_base = true; + } + + return metadata; + } + + private ensureNotSent(): void { + if (this.sent) { + throw new 
Error('createCommit builder cannot be reused after send()'); + } + } + + private normalizePath(path: string): string { + if (!path || typeof path !== 'string' || path.trim() === '') { + throw new Error('File path must be a non-empty string'); + } + return path.replace(/^\//, ''); + } +} + +export class FetchCommitTransport implements CommitTransport { + private readonly url: string; + + constructor(config: { baseUrl: string; version: number }) { + const trimmedBase = config.baseUrl.replace(/\/+$/, ''); + this.url = `${trimmedBase}/api/v${config.version}/repos/commit-pack`; + } + + async send(request: CommitTransportRequest): Promise { + const bodyIterable = buildMessageIterable(request.metadata, request.blobs); + const body = toRequestBody(bodyIterable); + + const init: RequestInit = { + method: 'POST', + headers: { + Authorization: `Bearer ${request.authorization}`, + 'Content-Type': 'application/x-ndjson', + Accept: 'application/json', + 'Code-Storage-Agent': getUserAgent(), + }, + body: body as any, + signal: request.signal, + }; + + if (requiresDuplex(body)) { + (init as RequestInit & { duplex: 'half' }).duplex = 'half'; + } + + const response = await fetch(this.url, init); + + if (!response.ok) { + const fallbackMessage = `createCommit request failed (${response.status} ${response.statusText})`; + const { statusMessage, statusLabel, refUpdate } = await parseCommitPackError( + response, + fallbackMessage, + ); + throw new RefUpdateError(statusMessage, { + status: statusLabel, + message: statusMessage, + refUpdate, + }); + } + + const ack = commitPackAckSchema.parse(await response.json()); + return ack; + } +} + +function buildMessageIterable( + metadata: CommitMetadataPayload, + blobs: Array<{ contentId: string; chunks: AsyncIterable }>, +): AsyncIterable { + const encoder = new TextEncoder(); + return { + async *[Symbol.asyncIterator]() { + yield encoder.encode(`${JSON.stringify({ metadata })}\n`); + for (const blob of blobs) { + for await (const segment of 
blob.chunks) { + const payload = { + blob_chunk: { + content_id: blob.contentId, + data: base64Encode(segment.chunk), + eof: segment.eof, + }, + }; + yield encoder.encode(`${JSON.stringify(payload)}\n`); + } + } + }, + }; +} + +function randomContentId(): string { + const cryptoObj = globalThis.crypto; + if (cryptoObj && typeof cryptoObj.randomUUID === 'function') { + return cryptoObj.randomUUID(); + } + const random = Math.random().toString(36).slice(2); + return `cid-${Date.now().toString(36)}-${random}`; +} + +function normalizeCommitOptions(options: CreateCommitOptions): NormalizedCommitOptions { + return { + targetBranch: resolveTargetBranch(options), + commitMessage: options.commitMessage, + expectedHeadSha: options.expectedHeadSha, + baseBranch: options.baseBranch, + ephemeral: options.ephemeral === true, + ephemeralBase: options.ephemeralBase === true, + author: options.author, + committer: options.committer, + signal: options.signal, + ttl: options.ttl, + }; +} + +function resolveTargetBranch(options: CreateCommitOptions): string { + const branchCandidate = + typeof options.targetBranch === 'string' ? 
options.targetBranch.trim() : ''; + if (branchCandidate) { + return normalizeBranchName(branchCandidate); + } + if (hasLegacyTargetRef(options)) { + return normalizeLegacyTargetRef(options.targetRef); + } + throw new Error('createCommit targetBranch is required'); +} + +function normalizeBranchName(value: string): string { + const trimmed = value.trim(); + if (!trimmed) { + throw new Error('createCommit targetBranch is required'); + } + if (trimmed.startsWith(HEADS_REF_PREFIX)) { + const branch = trimmed.slice(HEADS_REF_PREFIX.length).trim(); + if (!branch) { + throw new Error('createCommit targetBranch is required'); + } + return branch; + } + if (trimmed.startsWith('refs/')) { + throw new Error('createCommit targetBranch must not include refs/ prefix'); + } + return trimmed; +} + +function normalizeLegacyTargetRef(ref: string): string { + const trimmed = ref.trim(); + if (!trimmed) { + throw new Error('createCommit targetRef is required'); + } + if (!trimmed.startsWith(HEADS_REF_PREFIX)) { + throw new Error('createCommit targetRef must start with refs/heads/'); + } + const branch = trimmed.slice(HEADS_REF_PREFIX.length).trim(); + if (!branch) { + throw new Error('createCommit targetRef must include a branch name'); + } + return branch; +} + +function hasLegacyTargetRef(options: CreateCommitOptions): options is LegacyCreateCommitOptions { + return typeof (options as LegacyCreateCommitOptions).targetRef === 'string'; +} + +export function createCommitBuilder(deps: CommitBuilderDeps): CommitBuilder { + return new CommitBuilderImpl(deps); +} + +export function resolveCommitTtlSeconds(options?: { ttl?: number }): number { + if (typeof options?.ttl === 'number' && options.ttl > 0) { + return options.ttl; + } + return DEFAULT_TTL_SECONDS; +} diff --git a/packages/git-storage-sdk-node/src/diff-commit.ts b/packages/git-storage-sdk-node/src/diff-commit.ts new file mode 100644 index 000000000..97ed1f54a --- /dev/null +++ b/packages/git-storage-sdk-node/src/diff-commit.ts @@ 
-0,0 +1,302 @@ +import { buildCommitResult, parseCommitPackError } from './commit-pack'; +import { RefUpdateError } from './errors'; +import type { CommitPackAckRaw } from './schemas'; +import { commitPackAckSchema } from './schemas'; +import { + base64Encode, + type ChunkSegment, + chunkify, + requiresDuplex, + toAsyncIterable, + toRequestBody, +} from './stream-utils'; +import type { + CommitResult, + CommitSignature, + CreateCommitFromDiffOptions, + DiffSource, +} from './types'; +import { getUserAgent } from './version'; + +interface DiffCommitMetadataPayload { + target_branch: string; + expected_head_sha?: string; + base_branch?: string; + commit_message: string; + ephemeral?: boolean; + ephemeral_base?: boolean; + author: { + name: string; + email: string; + }; + committer?: { + name: string; + email: string; + }; +} + +interface DiffCommitTransportRequest { + authorization: string; + signal?: AbortSignal; + metadata: DiffCommitMetadataPayload; + diffChunks: AsyncIterable; +} + +interface DiffCommitTransport { + send(request: DiffCommitTransportRequest): Promise; +} + +type NormalizedDiffCommitOptions = { + targetBranch: string; + commitMessage: string; + expectedHeadSha?: string; + baseBranch?: string; + ephemeral?: boolean; + ephemeralBase?: boolean; + author: CommitSignature; + committer?: CommitSignature; + signal?: AbortSignal; + ttl?: number; + initialDiff: DiffSource; +}; + +interface CommitFromDiffSendDeps { + options: CreateCommitFromDiffOptions; + getAuthToken: () => Promise; + transport: DiffCommitTransport; +} + +class DiffCommitExecutor { + private readonly options: NormalizedDiffCommitOptions; + private readonly getAuthToken: () => Promise; + private readonly transport: DiffCommitTransport; + private readonly diffFactory: () => AsyncIterable; + private sent = false; + + constructor(deps: CommitFromDiffSendDeps) { + this.options = normalizeDiffCommitOptions(deps.options); + this.getAuthToken = deps.getAuthToken; + this.transport = deps.transport; 
+ + const trimmedMessage = this.options.commitMessage?.trim(); + const trimmedAuthorName = this.options.author?.name?.trim(); + const trimmedAuthorEmail = this.options.author?.email?.trim(); + + if (!trimmedMessage) { + throw new Error('createCommitFromDiff commitMessage is required'); + } + if (!trimmedAuthorName || !trimmedAuthorEmail) { + throw new Error('createCommitFromDiff author name and email are required'); + } + + this.options.commitMessage = trimmedMessage; + this.options.author = { + name: trimmedAuthorName, + email: trimmedAuthorEmail, + }; + + if (typeof this.options.expectedHeadSha === 'string') { + this.options.expectedHeadSha = this.options.expectedHeadSha.trim(); + } + if (typeof this.options.baseBranch === 'string') { + const trimmedBase = this.options.baseBranch.trim(); + if (trimmedBase === '') { + delete this.options.baseBranch; + } else { + if (trimmedBase.startsWith('refs/')) { + throw new Error('createCommitFromDiff baseBranch must not include refs/ prefix'); + } + this.options.baseBranch = trimmedBase; + } + } + if (this.options.ephemeralBase && !this.options.baseBranch) { + throw new Error('createCommitFromDiff ephemeralBase requires baseBranch'); + } + + this.diffFactory = () => toAsyncIterable(this.options.initialDiff); + } + + async send(): Promise { + this.ensureNotSent(); + this.sent = true; + + const metadata = this.buildMetadata(); + const diffIterable = chunkify(this.diffFactory()); + + const authorization = await this.getAuthToken(); + const ack = await this.transport.send({ + authorization, + signal: this.options.signal, + metadata, + diffChunks: diffIterable, + }); + + return buildCommitResult(ack); + } + + private buildMetadata(): DiffCommitMetadataPayload { + const metadata: DiffCommitMetadataPayload = { + target_branch: this.options.targetBranch, + commit_message: this.options.commitMessage, + author: { + name: this.options.author.name, + email: this.options.author.email, + }, + }; + + if (this.options.expectedHeadSha) { + 
metadata.expected_head_sha = this.options.expectedHeadSha; + } + if (this.options.baseBranch) { + metadata.base_branch = this.options.baseBranch; + } + if (this.options.committer) { + metadata.committer = { + name: this.options.committer.name, + email: this.options.committer.email, + }; + } + if (this.options.ephemeral) { + metadata.ephemeral = true; + } + if (this.options.ephemeralBase) { + metadata.ephemeral_base = true; + } + + return metadata; + } + + private ensureNotSent(): void { + if (this.sent) { + throw new Error('createCommitFromDiff cannot be reused after send()'); + } + } +} + +export class FetchDiffCommitTransport implements DiffCommitTransport { + private readonly url: string; + + constructor(config: { baseUrl: string; version: number }) { + const trimmedBase = config.baseUrl.replace(/\/+$/, ''); + this.url = `${trimmedBase}/api/v${config.version}/repos/diff-commit`; + } + + async send(request: DiffCommitTransportRequest): Promise { + const bodyIterable = buildMessageIterable(request.metadata, request.diffChunks); + const body = toRequestBody(bodyIterable); + + const init: RequestInit = { + method: 'POST', + headers: { + Authorization: `Bearer ${request.authorization}`, + 'Content-Type': 'application/x-ndjson', + Accept: 'application/json', + 'Code-Storage-Agent': getUserAgent(), + }, + body: body as any, + signal: request.signal, + }; + + if (requiresDuplex(body)) { + (init as RequestInit & { duplex: 'half' }).duplex = 'half'; + } + + const response = await fetch(this.url, init); + if (!response.ok) { + const fallbackMessage = `createCommitFromDiff request failed (${response.status} ${response.statusText})`; + const { statusMessage, statusLabel, refUpdate } = await parseCommitPackError( + response, + fallbackMessage, + ); + throw new RefUpdateError(statusMessage, { + status: statusLabel, + message: statusMessage, + refUpdate, + }); + } + + return commitPackAckSchema.parse(await response.json()); + } +} + +function buildMessageIterable( + metadata: 
DiffCommitMetadataPayload, + diffChunks: AsyncIterable, +): AsyncIterable { + const encoder = new TextEncoder(); + return { + async *[Symbol.asyncIterator]() { + yield encoder.encode(`${JSON.stringify({ metadata })}\n`); + for await (const segment of diffChunks) { + const payload = { + diff_chunk: { + data: base64Encode(segment.chunk), + eof: segment.eof, + }, + }; + yield encoder.encode(`${JSON.stringify(payload)}\n`); + } + }, + }; +} + +function normalizeDiffCommitOptions( + options: CreateCommitFromDiffOptions, +): NormalizedDiffCommitOptions { + if (!options || typeof options !== 'object') { + throw new Error('createCommitFromDiff options are required'); + } + + if (options.diff === undefined || options.diff === null) { + throw new Error('createCommitFromDiff diff is required'); + } + + const targetBranch = normalizeBranchName(options.targetBranch); + + let committer: CommitSignature | undefined; + if (options.committer) { + const name = options.committer.name?.trim(); + const email = options.committer.email?.trim(); + if (!name || !email) { + throw new Error('createCommitFromDiff committer name and email are required when provided'); + } + committer = { name, email }; + } + + return { + targetBranch, + commitMessage: options.commitMessage, + expectedHeadSha: options.expectedHeadSha, + baseBranch: options.baseBranch, + ephemeral: options.ephemeral === true, + ephemeralBase: options.ephemeralBase === true, + author: options.author, + committer, + signal: options.signal, + ttl: options.ttl, + initialDiff: options.diff, + }; +} + +function normalizeBranchName(value: string | undefined): string { + const trimmed = value?.trim(); + if (!trimmed) { + throw new Error('createCommitFromDiff targetBranch is required'); + } + if (trimmed.startsWith('refs/heads/')) { + const branch = trimmed.slice('refs/heads/'.length).trim(); + if (!branch) { + throw new Error('createCommitFromDiff targetBranch must include a branch name'); + } + return branch; + } + if 
(trimmed.startsWith('refs/')) { + throw new Error('createCommitFromDiff targetBranch must not include refs/ prefix'); + } + return trimmed; +} + +export async function sendCommitFromDiff(deps: CommitFromDiffSendDeps): Promise { + const executor = new DiffCommitExecutor(deps); + return executor.send(); +} diff --git a/packages/git-storage-sdk-node/src/errors.ts b/packages/git-storage-sdk-node/src/errors.ts new file mode 100644 index 000000000..d33a69391 --- /dev/null +++ b/packages/git-storage-sdk-node/src/errors.ts @@ -0,0 +1,50 @@ +import type { RefUpdate, RefUpdateReason } from './types'; + +export interface RefUpdateErrorOptions { + status: string; + message?: string; + refUpdate?: Partial; + reason?: RefUpdateReason; +} + +export class RefUpdateError extends Error { + public readonly status: string; + public readonly reason: RefUpdateReason; + public readonly refUpdate?: Partial; + + constructor(message: string, options: RefUpdateErrorOptions) { + super(message); + this.name = 'RefUpdateError'; + this.status = options.status; + this.reason = options.reason ?? inferRefUpdateReason(options.status); + this.refUpdate = options.refUpdate; + } +} + +const REF_REASON_MAP: Record = { + precondition_failed: 'precondition_failed', + conflict: 'conflict', + not_found: 'not_found', + invalid: 'invalid', + timeout: 'timeout', + unauthorized: 'unauthorized', + forbidden: 'forbidden', + unavailable: 'unavailable', + internal: 'internal', + failed: 'failed', + ok: 'unknown', +}; + +export function inferRefUpdateReason(status?: string): RefUpdateReason { + if (!status) { + return 'unknown'; + } + + const trimmed = status.trim(); + if (trimmed === '') { + return 'unknown'; + } + + const label = trimmed.toLowerCase(); + return REF_REASON_MAP[label] ?? 
'unknown'; +} diff --git a/packages/git-storage-sdk-node/src/fetch.ts b/packages/git-storage-sdk-node/src/fetch.ts new file mode 100644 index 000000000..a9c9ece0f --- /dev/null +++ b/packages/git-storage-sdk-node/src/fetch.ts @@ -0,0 +1,153 @@ +import { errorEnvelopeSchema } from './schemas'; +import type { ValidAPIVersion, ValidMethod, ValidPath } from './types'; +import { getUserAgent } from './version'; + +interface RequestOptions { + allowedStatus?: number[]; +} + +export class ApiError extends Error { + public readonly status: number; + public readonly statusText: string; + public readonly method: ValidMethod; + public readonly url: string; + public readonly body?: unknown; + + constructor(params: { + message: string; + status: number; + statusText: string; + method: ValidMethod; + url: string; + body?: unknown; + }) { + super(params.message); + this.name = 'ApiError'; + this.status = params.status; + this.statusText = params.statusText; + this.method = params.method; + this.url = params.url; + this.body = params.body; + } +} + +export class ApiFetcher { + constructor( + private readonly API_BASE_URL: string, + private readonly version: ValidAPIVersion, + ) {} + + private getBaseUrl() { + return `${this.API_BASE_URL}/api/v${this.version}`; + } + + private getRequestUrl(path: ValidPath) { + if (typeof path === 'string') { + return `${this.getBaseUrl()}/${path}`; + } else if (path.params) { + const searchParams = new URLSearchParams(); + for (const [key, value] of Object.entries(path.params)) { + if (Array.isArray(value)) { + for (const v of value) { + searchParams.append(key, v); + } + } else { + searchParams.append(key, value); + } + } + const paramStr = searchParams.toString(); + return `${this.getBaseUrl()}/${path.path}${paramStr ? 
`?${paramStr}` : ''}`; + } else { + return `${this.getBaseUrl()}/${path.path}`; + } + } + + private async fetch(path: ValidPath, method: ValidMethod, jwt: string, options?: RequestOptions) { + const requestUrl = this.getRequestUrl(path); + + const requestOptions: RequestInit = { + method, + headers: { + Authorization: `Bearer ${jwt}`, + 'Content-Type': 'application/json', + 'Code-Storage-Agent': getUserAgent(), + }, + }; + + if (method !== 'GET' && typeof path !== 'string' && path.body) { + requestOptions.body = JSON.stringify(path.body); + } + + const response = await fetch(requestUrl, requestOptions); + + if (!response.ok) { + const allowed = options?.allowedStatus ?? []; + if (allowed.includes(response.status)) { + return response; + } + + let errorBody: unknown; + let message: string | undefined; + const contentType = response.headers.get('content-type') ?? ''; + + try { + if (contentType.includes('application/json')) { + errorBody = await response.json(); + } else { + const text = await response.text(); + errorBody = text; + } + } catch { + // Fallback to plain text if JSON parse failed after reading body + try { + errorBody = await response.text(); + } catch { + errorBody = undefined; + } + } + + if (typeof errorBody === 'string') { + const trimmed = errorBody.trim(); + if (trimmed) { + message = trimmed; + } + } else if (errorBody && typeof errorBody === 'object') { + const parsedError = errorEnvelopeSchema.safeParse(errorBody); + if (parsedError.success) { + const trimmed = parsedError.data.error.trim(); + if (trimmed) { + message = trimmed; + } + } + } + + throw new ApiError({ + message: + message ?? 
+ `Request ${method} ${requestUrl} failed with status ${response.status} ${response.statusText}`, + status: response.status, + statusText: response.statusText, + method, + url: requestUrl, + body: errorBody, + }); + } + return response; + } + + async get(path: ValidPath, jwt: string, options?: RequestOptions) { + return this.fetch(path, 'GET', jwt, options); + } + + async post(path: ValidPath, jwt: string, options?: RequestOptions) { + return this.fetch(path, 'POST', jwt, options); + } + + async put(path: ValidPath, jwt: string, options?: RequestOptions) { + return this.fetch(path, 'PUT', jwt, options); + } + + async delete(path: ValidPath, jwt: string, options?: RequestOptions) { + return this.fetch(path, 'DELETE', jwt, options); + } +} diff --git a/packages/git-storage-sdk-node/src/index.ts b/packages/git-storage-sdk-node/src/index.ts new file mode 100644 index 000000000..070c5e36c --- /dev/null +++ b/packages/git-storage-sdk-node/src/index.ts @@ -0,0 +1,1391 @@ +/** + * Pierre Git Storage SDK + * + * A TypeScript SDK for interacting with Pierre's git storage system + */ + +import { importPKCS8, SignJWT } from 'jose'; +import snakecaseKeys from 'snakecase-keys'; +import { createCommitBuilder, FetchCommitTransport, resolveCommitTtlSeconds } from './commit'; +import { FetchDiffCommitTransport, sendCommitFromDiff } from './diff-commit'; +import { RefUpdateError } from './errors'; +import { ApiError, ApiFetcher } from './fetch'; +import type { RestoreCommitAckRaw } from './schemas'; +import { + branchDiffResponseSchema, + commitDiffResponseSchema, + createBranchResponseSchema, + errorEnvelopeSchema, + grepResponseSchema, + listBranchesResponseSchema, + listCommitsResponseSchema, + listFilesResponseSchema, + listReposResponseSchema, + noteReadResponseSchema, + noteWriteResponseSchema, + restoreCommitAckSchema, + restoreCommitResponseSchema, +} from './schemas'; +import type { + AppendNoteOptions, + BranchInfo, + CommitBuilder, + CommitInfo, + CommitResult, + 
CreateBranchOptions, + CreateBranchResponse, + CreateBranchResult, + CreateCommitFromDiffOptions, + CreateCommitOptions, + CreateNoteOptions, + CreateRepoOptions, + DeleteNoteOptions, + DeleteRepoOptions, + DeleteRepoResult, + DiffFileState, + FileDiff, + FilteredFile, + FindOneOptions, + GetBranchDiffOptions, + GetBranchDiffResponse, + GetBranchDiffResult, + GetCommitDiffOptions, + GetCommitDiffResponse, + GetCommitDiffResult, + GetFileOptions, + GetNoteOptions, + GetNoteResult, + GetRemoteURLOptions, + GitStorageOptions, + GrepFileMatch, + GrepLine, + GrepOptions, + GrepResult, + ListBranchesOptions, + ListBranchesResponse, + ListBranchesResult, + ListCommitsOptions, + ListCommitsResponse, + ListCommitsResult, + ListFilesOptions, + ListFilesResult, + ListReposOptions, + ListReposResponse, + ListReposResult, + NoteWriteResult, + PullUpstreamOptions, + RawBranchInfo, + RawCommitInfo, + RawFileDiff, + RawFilteredFile, + RefUpdate, + Repo, + RestoreCommitOptions, + RestoreCommitResult, + ValidAPIVersion, +} from './types'; + +/** + * Type definitions for Pierre Git Storage SDK + */ + +export { RefUpdateError } from './errors'; +export { ApiError } from './fetch'; +// Import additional types from types.ts +export * from './types'; + +// Export webhook validation utilities +export { parseSignatureHeader, validateWebhook, validateWebhookSignature } from './webhook'; + +/** + * Git Storage API + */ + +declare const __STORAGE_BASE_URL__: string; +declare const __API_BASE_URL__: string; + +const API_BASE_URL = __API_BASE_URL__; +const STORAGE_BASE_URL = __STORAGE_BASE_URL__; +const API_VERSION: ValidAPIVersion = 1; + +const apiInstanceMap = new Map(); +const DEFAULT_TOKEN_TTL_SECONDS = 60 * 60; // 1 hour +const RESTORE_COMMIT_ALLOWED_STATUS = [ + 400, // Bad Request - validation errors + 401, // Unauthorized - missing/invalid auth header + 403, // Forbidden - missing git:write scope + 404, // Not Found - repo lookup failures + 408, // Request Timeout - client cancelled + 
409, // Conflict - concurrent ref updates + 412, // Precondition Failed - optimistic concurrency + 422, // Unprocessable Entity - metadata issues + 429, // Too Many Requests - upstream throttling + 499, // Client Closed Request - storage cancellation + 500, // Internal Server Error - generic failure + 502, // Bad Gateway - storage/gateway bridge issues + 503, // Service Unavailable - storage selection failures + 504, // Gateway Timeout - long-running storage operations +] as const; + +const NOTE_WRITE_ALLOWED_STATUS = [ + 400, // Bad Request - validation errors + 401, // Unauthorized - missing/invalid auth header + 403, // Forbidden - missing git:write scope + 404, // Not Found - repo or note lookup failures + 408, // Request Timeout - client cancelled + 409, // Conflict - concurrent ref updates + 412, // Precondition Failed - optimistic concurrency + 422, // Unprocessable Entity - metadata issues + 429, // Too Many Requests - upstream throttling + 499, // Client Closed Request - storage cancellation + 500, // Internal Server Error - generic failure + 502, // Bad Gateway - storage/gateway bridge issues + 503, // Service Unavailable - storage selection failures + 504, // Gateway Timeout - long-running storage operations +] as const; + +function resolveInvocationTtlSeconds( + options?: { ttl?: number }, + defaultValue: number = DEFAULT_TOKEN_TTL_SECONDS, +): number { + if (typeof options?.ttl === 'number' && options.ttl > 0) { + return options.ttl; + } + return defaultValue; +} + +type RestoreCommitAck = RestoreCommitAckRaw; + +function toRefUpdate(result: RestoreCommitAck['result']): RefUpdate { + return { + branch: result.branch, + oldSha: result.old_sha, + newSha: result.new_sha, + }; +} + +function buildRestoreCommitResult(ack: RestoreCommitAck): RestoreCommitResult { + const refUpdate = toRefUpdate(ack.result); + if (!ack.result.success) { + throw new RefUpdateError( + ack.result.message ?? 
`Restore commit failed with status ${ack.result.status}`, + { + status: ack.result.status, + message: ack.result.message, + refUpdate, + }, + ); + } + return { + commitSha: ack.commit.commit_sha, + treeSha: ack.commit.tree_sha, + targetBranch: ack.commit.target_branch, + packBytes: ack.commit.pack_bytes, + refUpdate, + }; +} + +interface RestoreCommitFailureInfo { + status?: string; + message?: string; + refUpdate?: Partial; +} + +function toPartialRefUpdate( + branch?: unknown, + oldSha?: unknown, + newSha?: unknown, +): Partial | undefined { + const refUpdate: Partial = {}; + if (typeof branch === 'string' && branch.trim() !== '') { + refUpdate.branch = branch; + } + if (typeof oldSha === 'string' && oldSha.trim() !== '') { + refUpdate.oldSha = oldSha; + } + if (typeof newSha === 'string' && newSha.trim() !== '') { + refUpdate.newSha = newSha; + } + return Object.keys(refUpdate).length > 0 ? refUpdate : undefined; +} + +function parseRestoreCommitPayload( + payload: unknown, +): { ack: RestoreCommitAck } | { failure: RestoreCommitFailureInfo } | null { + const ack = restoreCommitAckSchema.safeParse(payload); + if (ack.success) { + return { ack: ack.data }; + } + + const failure = restoreCommitResponseSchema.safeParse(payload); + if (failure.success) { + const result = failure.data.result; + return { + failure: { + status: result.status, + message: result.message, + refUpdate: toPartialRefUpdate(result.branch, result.old_sha, result.new_sha), + }, + }; + } + + return null; +} + +function httpStatusToRestoreStatus(status: number): string { + switch (status) { + case 409: + return 'conflict'; + case 412: + return 'precondition_failed'; + default: + return `${status}`; + } +} + +function getApiInstance(baseUrl: string, version: ValidAPIVersion) { + if (!apiInstanceMap.has(`${baseUrl}--${version}`)) { + apiInstanceMap.set(`${baseUrl}--${version}`, new ApiFetcher(baseUrl, version)); + } + return apiInstanceMap.get(`${baseUrl}--${version}`)!; +} + +function 
transformBranchInfo(raw: RawBranchInfo): BranchInfo { + return { + cursor: raw.cursor, + name: raw.name, + headSha: raw.head_sha, + createdAt: raw.created_at, + }; +} + +function transformListBranchesResult(raw: ListBranchesResponse): ListBranchesResult { + return { + branches: raw.branches.map(transformBranchInfo), + nextCursor: raw.next_cursor ?? undefined, + hasMore: raw.has_more, + }; +} + +function transformCommitInfo(raw: RawCommitInfo): CommitInfo { + const parsedDate = new Date(raw.date); + return { + sha: raw.sha, + message: raw.message, + authorName: raw.author_name, + authorEmail: raw.author_email, + committerName: raw.committer_name, + committerEmail: raw.committer_email, + date: parsedDate, + rawDate: raw.date, + }; +} + +function transformListCommitsResult(raw: ListCommitsResponse): ListCommitsResult { + return { + commits: raw.commits.map(transformCommitInfo), + nextCursor: raw.next_cursor ?? undefined, + hasMore: raw.has_more, + }; +} + +function normalizeDiffState(rawState: string): DiffFileState { + if (!rawState) { + return 'unknown'; + } + const leading = rawState.trim()[0]?.toUpperCase(); + switch (leading) { + case 'A': + return 'added'; + case 'M': + return 'modified'; + case 'D': + return 'deleted'; + case 'R': + return 'renamed'; + case 'C': + return 'copied'; + case 'T': + return 'type_changed'; + case 'U': + return 'unmerged'; + default: + return 'unknown'; + } +} + +function transformFileDiff(raw: RawFileDiff): FileDiff { + const normalizedState = normalizeDiffState(raw.state); + return { + path: raw.path, + state: normalizedState, + rawState: raw.state, + oldPath: raw.old_path ?? undefined, + raw: raw.raw, + bytes: raw.bytes, + isEof: raw.is_eof, + }; +} + +function transformFilteredFile(raw: RawFilteredFile): FilteredFile { + const normalizedState = normalizeDiffState(raw.state); + return { + path: raw.path, + state: normalizedState, + rawState: raw.state, + oldPath: raw.old_path ?? 
undefined, + bytes: raw.bytes, + isEof: raw.is_eof, + }; +} + +function transformBranchDiffResult(raw: GetBranchDiffResponse): GetBranchDiffResult { + return { + branch: raw.branch, + base: raw.base, + stats: raw.stats, + files: raw.files.map(transformFileDiff), + filteredFiles: raw.filtered_files.map(transformFilteredFile), + }; +} + +function transformCommitDiffResult(raw: GetCommitDiffResponse): GetCommitDiffResult { + return { + sha: raw.sha, + stats: raw.stats, + files: raw.files.map(transformFileDiff), + filteredFiles: raw.filtered_files.map(transformFilteredFile), + }; +} + +function transformCreateBranchResult(raw: CreateBranchResponse): CreateBranchResult { + return { + message: raw.message, + targetBranch: raw.target_branch, + targetIsEphemeral: raw.target_is_ephemeral, + commitSha: raw.commit_sha ?? undefined, + }; +} + +function transformListReposResult(raw: ListReposResponse): ListReposResult { + return { + repos: raw.repos.map((repo) => ({ + repoId: repo.repo_id, + url: repo.url, + defaultBranch: repo.default_branch, + createdAt: repo.created_at, + baseRepo: repo.base_repo + ? { + provider: repo.base_repo.provider, + owner: repo.base_repo.owner, + name: repo.base_repo.name, + } + : undefined, + })), + nextCursor: raw.next_cursor ?? 
undefined, + hasMore: raw.has_more, + }; +} + +function transformGrepLine(raw: { line_number: number; text: string; type: string }): GrepLine { + return { + lineNumber: raw.line_number, + text: raw.text, + type: raw.type, + }; +} + +function transformGrepFileMatch(raw: { + path: string; + lines: { line_number: number; text: string; type: string }[]; +}): GrepFileMatch { + return { + path: raw.path, + lines: raw.lines.map(transformGrepLine), + }; +} + +function transformNoteReadResult(raw: { + sha: string; + note: string; + ref_sha: string; +}): GetNoteResult { + return { + sha: raw.sha, + note: raw.note, + refSha: raw.ref_sha, + }; +} + +function transformNoteWriteResult(raw: { + sha: string; + target_ref: string; + base_commit?: string; + new_ref_sha: string; + result: { success: boolean; status: string; message?: string }; +}): NoteWriteResult { + return { + sha: raw.sha, + targetRef: raw.target_ref, + baseCommit: raw.base_commit, + newRefSha: raw.new_ref_sha, + result: { + success: raw.result.success, + status: raw.result.status, + message: raw.result.message, + }, + }; +} + +function buildNoteWriteBody( + sha: string, + note: string, + action: 'add' | 'append', + options: { expectedRefSha?: string; author?: { name: string; email: string } }, +): Record { + const body: Record = { + sha, + action, + note, + }; + + const expectedRefSha = options.expectedRefSha?.trim(); + if (expectedRefSha) { + body.expected_ref_sha = expectedRefSha; + } + + if (options.author) { + const authorName = options.author.name?.trim(); + const authorEmail = options.author.email?.trim(); + if (!authorName || !authorEmail) { + throw new Error('note author name and email are required when provided'); + } + body.author = { + name: authorName, + email: authorEmail, + }; + } + + return body; +} + +async function parseNoteWriteResponse( + response: Response, + method: 'POST' | 'DELETE', +): Promise { + let jsonBody: unknown; + const contentType = response.headers.get('content-type') ?? 
''; + try { + if (contentType.includes('application/json')) { + jsonBody = await response.json(); + } else { + jsonBody = await response.text(); + } + } catch { + jsonBody = undefined; + } + + if (jsonBody && typeof jsonBody === 'object') { + const parsed = noteWriteResponseSchema.safeParse(jsonBody); + if (parsed.success) { + return transformNoteWriteResult(parsed.data); + } + const parsedError = errorEnvelopeSchema.safeParse(jsonBody); + if (parsedError.success) { + throw new ApiError({ + message: parsedError.data.error, + status: response.status, + statusText: response.statusText, + method, + url: response.url, + body: jsonBody, + }); + } + } + + const fallbackMessage = + typeof jsonBody === 'string' && jsonBody.trim() !== '' + ? jsonBody.trim() + : `Request ${method} ${response.url} failed with status ${response.status} ${response.statusText}`; + + throw new ApiError({ + message: fallbackMessage, + status: response.status, + statusText: response.statusText, + method, + url: response.url, + body: jsonBody, + }); +} + +/** + * Implementation of the Repo interface + */ +class RepoImpl implements Repo { + private readonly api: ApiFetcher; + + constructor( + public readonly id: string, + public readonly defaultBranch: string, + private readonly options: GitStorageOptions, + private readonly generateJWT: ( + repoId: string, + options?: GetRemoteURLOptions, + ) => Promise, + ) { + this.api = getApiInstance( + this.options.apiBaseUrl ?? GitStorage.getDefaultAPIBaseUrl(options.name), + this.options.apiVersion ?? 
API_VERSION, + ); + } + + async getRemoteURL(urlOptions?: GetRemoteURLOptions): Promise { + const url = new URL(`https://${this.options.storageBaseUrl}/${this.id}.git`); + url.username = `t`; + url.password = await this.generateJWT(this.id, urlOptions); + return url.toString(); + } + + async getEphemeralRemoteURL(urlOptions?: GetRemoteURLOptions): Promise { + const url = new URL(`https://${this.options.storageBaseUrl}/${this.id}+ephemeral.git`); + url.username = `t`; + url.password = await this.generateJWT(this.id, urlOptions); + return url.toString(); + } + + async getFileStream(options: GetFileOptions): Promise { + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:read'], + ttl, + }); + + const params: Record = { + path: options.path, + }; + + if (options.ref) { + params.ref = options.ref; + } + if (typeof options.ephemeral === 'boolean') { + params.ephemeral = String(options.ephemeral); + } + if (typeof options.ephemeralBase === 'boolean') { + params.ephemeral_base = String(options.ephemeralBase); + } + + // Return the raw fetch Response for streaming + return this.api.get({ path: 'repos/file', params }, jwt); + } + + async listFiles(options?: ListFilesOptions): Promise { + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:read'], + ttl, + }); + + const params: Record = {}; + if (options?.ref) { + params.ref = options.ref; + } + if (typeof options?.ephemeral === 'boolean') { + params.ephemeral = String(options.ephemeral); + } + const response = await this.api.get( + { path: 'repos/files', params: Object.keys(params).length ? 
+      params : undefined },
+      jwt,
+    );
+
+    const raw = listFilesResponseSchema.parse(await response.json());
+    return { paths: raw.paths, ref: raw.ref };
+  }
+
+  /**
+   * List branches for this repo with cursor-based pagination.
+   * Mints a short-lived git:read token, calls `repos/branches`, and
+   * normalizes the snake_case payload via transformListBranchesResult.
+   */
+  async listBranches(options?: ListBranchesOptions): Promise {
+    const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS);
+    const jwt = await this.generateJWT(this.id, {
+      permissions: ['git:read'],
+      ttl,
+    });
+
+    const cursor = options?.cursor;
+    const limit = options?.limit;
+
+    let params: Record | undefined;
+
+    // Only build a query string when the caller asked for pagination.
+    if (typeof cursor === 'string' || typeof limit === 'number') {
+      params = {};
+      if (typeof cursor === 'string') {
+        params.cursor = cursor;
+      }
+      if (typeof limit === 'number') {
+        params.limit = limit.toString();
+      }
+    }
+
+    const response = await this.api.get({ path: 'repos/branches', params }, jwt);
+
+    const raw = listBranchesResponseSchema.parse(await response.json());
+    return transformListBranchesResult({
+      ...raw,
+      next_cursor: raw.next_cursor ?? undefined,
+    });
+  }
+
+  /**
+   * List commits, optionally scoped to a branch, with cursor-based pagination.
+   * Mints a short-lived git:read token and normalizes the snake_case payload.
+   */
+  async listCommits(options?: ListCommitsOptions): Promise {
+    const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS);
+    const jwt = await this.generateJWT(this.id, {
+      permissions: ['git:read'],
+      ttl,
+    });
+
+    let params: Record | undefined;
+
+    // NOTE(review): a `limit` of 0 is falsy and will not create params here;
+    // assumed intentional since 0 is not a meaningful page size — confirm.
+    if (options?.branch || options?.cursor || options?.limit) {
+      params = {};
+      if (options?.branch) {
+        params.branch = options.branch;
+      }
+      if (options?.cursor) {
+        params.cursor = options.cursor;
+      }
+      // Strict equality for consistency with every other typeof check in this file.
+      if (typeof options?.limit === 'number') {
+        params.limit = options.limit.toString();
+      }
+    }
+
+    const response = await this.api.get({ path: 'repos/commits', params }, jwt);
+
+    const raw = listCommitsResponseSchema.parse(await response.json());
+    return transformListCommitsResult({
+      ...raw,
+      next_cursor: raw.next_cursor ??
undefined, + }); + } + + async getNote(options: GetNoteOptions): Promise { + const sha = options?.sha?.trim(); + if (!sha) { + throw new Error('getNote sha is required'); + } + + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:read'], + ttl, + }); + + const response = await this.api.get({ path: 'repos/notes', params: { sha } }, jwt); + const raw = noteReadResponseSchema.parse(await response.json()); + return transformNoteReadResult(raw); + } + + async createNote(options: CreateNoteOptions): Promise { + const sha = options?.sha?.trim(); + if (!sha) { + throw new Error('createNote sha is required'); + } + + const note = options?.note?.trim(); + if (!note) { + throw new Error('createNote note is required'); + } + + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:write'], + ttl, + }); + + const body = buildNoteWriteBody(sha, note, 'add', { + expectedRefSha: options.expectedRefSha, + author: options.author, + }); + + const response = await this.api.post({ path: 'repos/notes', body }, jwt, { + allowedStatus: [...NOTE_WRITE_ALLOWED_STATUS], + }); + + const result = await parseNoteWriteResponse(response, 'POST'); + if (!result.result.success) { + throw new RefUpdateError( + result.result.message ?? 
`createNote failed with status ${result.result.status}`, + { + status: result.result.status, + message: result.result.message, + refUpdate: toPartialRefUpdate(result.targetRef, result.baseCommit, result.newRefSha), + }, + ); + } + return result; + } + + async appendNote(options: AppendNoteOptions): Promise { + const sha = options?.sha?.trim(); + if (!sha) { + throw new Error('appendNote sha is required'); + } + + const note = options?.note?.trim(); + if (!note) { + throw new Error('appendNote note is required'); + } + + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:write'], + ttl, + }); + + const body = buildNoteWriteBody(sha, note, 'append', { + expectedRefSha: options.expectedRefSha, + author: options.author, + }); + + const response = await this.api.post({ path: 'repos/notes', body }, jwt, { + allowedStatus: [...NOTE_WRITE_ALLOWED_STATUS], + }); + + const result = await parseNoteWriteResponse(response, 'POST'); + if (!result.result.success) { + throw new RefUpdateError( + result.result.message ?? 
`appendNote failed with status ${result.result.status}`, + { + status: result.result.status, + message: result.result.message, + refUpdate: toPartialRefUpdate(result.targetRef, result.baseCommit, result.newRefSha), + }, + ); + } + return result; + } + + async deleteNote(options: DeleteNoteOptions): Promise { + const sha = options?.sha?.trim(); + if (!sha) { + throw new Error('deleteNote sha is required'); + } + + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:write'], + ttl, + }); + + const body: Record = { + sha, + }; + + const expectedRefSha = options.expectedRefSha?.trim(); + if (expectedRefSha) { + body.expected_ref_sha = expectedRefSha; + } + + if (options.author) { + const authorName = options.author.name?.trim(); + const authorEmail = options.author.email?.trim(); + if (!authorName || !authorEmail) { + throw new Error('deleteNote author name and email are required when provided'); + } + body.author = { + name: authorName, + email: authorEmail, + }; + } + + const response = await this.api.delete({ path: 'repos/notes', body }, jwt, { + allowedStatus: [...NOTE_WRITE_ALLOWED_STATUS], + }); + + const result = await parseNoteWriteResponse(response, 'DELETE'); + if (!result.result.success) { + throw new RefUpdateError( + result.result.message ?? 
`deleteNote failed with status ${result.result.status}`, + { + status: result.result.status, + message: result.result.message, + refUpdate: toPartialRefUpdate(result.targetRef, result.baseCommit, result.newRefSha), + }, + ); + } + return result; + } + + async getBranchDiff(options: GetBranchDiffOptions): Promise { + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:read'], + ttl, + }); + + const params: Record = { + branch: options.branch, + }; + + if (options.base) { + params.base = options.base; + } + if (typeof options.ephemeral === 'boolean') { + params.ephemeral = String(options.ephemeral); + } + if (typeof options.ephemeralBase === 'boolean') { + params.ephemeral_base = String(options.ephemeralBase); + } + if (options.paths && options.paths.length > 0) { + params.path = options.paths; + } + + const response = await this.api.get({ path: 'repos/branches/diff', params }, jwt); + + const raw = branchDiffResponseSchema.parse(await response.json()); + return transformBranchDiffResult(raw); + } + + async getCommitDiff(options: GetCommitDiffOptions): Promise { + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:read'], + ttl, + }); + + const params: Record = { + sha: options.sha, + }; + + if (options.baseSha) { + params.baseSha = options.baseSha; + } + if (options.paths && options.paths.length > 0) { + params.path = options.paths; + } + + const response = await this.api.get({ path: 'repos/diff', params }, jwt); + + const raw = commitDiffResponseSchema.parse(await response.json()); + return transformCommitDiffResult(raw); + } + + async grep(options: GrepOptions): Promise { + const pattern = options?.query?.pattern?.trim(); + if (!pattern) { + throw new Error('grep query.pattern is required'); + } + + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + 
const jwt = await this.generateJWT(this.id, { + permissions: ['git:read'], + ttl, + }); + + const body: Record = { + query: { + pattern, + ...(typeof options.query.caseSensitive === 'boolean' + ? { case_sensitive: options.query.caseSensitive } + : {}), + }, + }; + + if (options.ref) { + body.rev = options.ref; + } + if (Array.isArray(options.paths) && options.paths.length > 0) { + body.paths = options.paths; + } + if (options.fileFilters) { + body.file_filters = { + ...(options.fileFilters.includeGlobs + ? { include_globs: options.fileFilters.includeGlobs } + : {}), + ...(options.fileFilters.excludeGlobs + ? { exclude_globs: options.fileFilters.excludeGlobs } + : {}), + ...(options.fileFilters.extensionFilters + ? { extension_filters: options.fileFilters.extensionFilters } + : {}), + }; + } + if (options.context) { + body.context = { + ...(typeof options.context.before === 'number' ? { before: options.context.before } : {}), + ...(typeof options.context.after === 'number' ? { after: options.context.after } : {}), + }; + } + if (options.limits) { + body.limits = { + ...(typeof options.limits.maxLines === 'number' + ? { max_lines: options.limits.maxLines } + : {}), + ...(typeof options.limits.maxMatchesPerFile === 'number' + ? { max_matches_per_file: options.limits.maxMatchesPerFile } + : {}), + }; + } + if (options.pagination) { + body.pagination = { + ...(typeof options.pagination.cursor === 'string' && options.pagination.cursor.trim() !== '' + ? { cursor: options.pagination.cursor } + : {}), + ...(typeof options.pagination.limit === 'number' + ? 
{ limit: options.pagination.limit } + : {}), + }; + } + + const response = await this.api.post({ path: 'repos/grep', body }, jwt); + const raw = grepResponseSchema.parse(await response.json()); + + return { + query: { + pattern: raw.query.pattern, + caseSensitive: raw.query.case_sensitive, + }, + repo: { + ref: raw.repo.ref, + commit: raw.repo.commit, + }, + matches: raw.matches.map(transformGrepFileMatch), + nextCursor: raw.next_cursor ?? undefined, + hasMore: raw.has_more, + }; + } + + async pullUpstream(options: PullUpstreamOptions = {}): Promise { + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:write'], + ttl, + }); + + const body: Record = {}; + + if (options.ref) { + body.ref = options.ref; + } + + const response = await this.api.post({ path: 'repos/pull-upstream', body }, jwt); + + if (response.status !== 202) { + throw new Error(`Pull Upstream failed: ${response.status} ${await response.text()}`); + } + + return; + } + + async createBranch(options: CreateBranchOptions): Promise { + const baseBranch = options?.baseBranch?.trim(); + if (!baseBranch) { + throw new Error('createBranch baseBranch is required'); + } + const targetBranch = options?.targetBranch?.trim(); + if (!targetBranch) { + throw new Error('createBranch targetBranch is required'); + } + + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:write'], + ttl, + }); + + const body: Record = { + base_branch: baseBranch, + target_branch: targetBranch, + }; + + if (options.baseIsEphemeral === true) { + body.base_is_ephemeral = true; + } + if (options.targetIsEphemeral === true) { + body.target_is_ephemeral = true; + } + + const response = await this.api.post({ path: 'repos/branches/create', body }, jwt); + const raw = createBranchResponseSchema.parse(await response.json()); + return 
transformCreateBranchResult(raw); + } + + async restoreCommit(options: RestoreCommitOptions): Promise { + const targetBranch = options?.targetBranch?.trim(); + if (!targetBranch) { + throw new Error('restoreCommit targetBranch is required'); + } + if (targetBranch.startsWith('refs/')) { + throw new Error('restoreCommit targetBranch must not include refs/ prefix'); + } + + const targetCommitSha = options?.targetCommitSha?.trim(); + if (!targetCommitSha) { + throw new Error('restoreCommit targetCommitSha is required'); + } + const commitMessage = options?.commitMessage?.trim(); + + const authorName = options.author?.name?.trim(); + const authorEmail = options.author?.email?.trim(); + if (!authorName || !authorEmail) { + throw new Error('restoreCommit author name and email are required'); + } + + const ttl = resolveCommitTtlSeconds(options); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:write'], + ttl, + }); + + const metadata: Record = { + target_branch: targetBranch, + target_commit_sha: targetCommitSha, + author: { + name: authorName, + email: authorEmail, + }, + }; + + if (commitMessage) { + metadata.commit_message = commitMessage; + } + + const expectedHeadSha = options.expectedHeadSha?.trim(); + if (expectedHeadSha) { + metadata.expected_head_sha = expectedHeadSha; + } + + if (options.committer) { + const committerName = options.committer.name?.trim(); + const committerEmail = options.committer.email?.trim(); + if (!committerName || !committerEmail) { + throw new Error('restoreCommit committer name and email are required when provided'); + } + metadata.committer = { + name: committerName, + email: committerEmail, + }; + } + + const response = await this.api.post( + { path: 'repos/restore-commit', body: { metadata } }, + jwt, + { + allowedStatus: [...RESTORE_COMMIT_ALLOWED_STATUS], + }, + ); + + const payload = await response.json(); + const parsed = parseRestoreCommitPayload(payload); + if (parsed && 'ack' in parsed) { + return 
buildRestoreCommitResult(parsed.ack); + } + + const failure = parsed && 'failure' in parsed ? parsed.failure : undefined; + const status = failure?.status ?? httpStatusToRestoreStatus(response.status); + const message = + failure?.message ?? + `Restore commit failed with HTTP ${response.status}` + + (response.statusText ? ` ${response.statusText}` : ''); + + throw new RefUpdateError(message, { + status, + refUpdate: failure?.refUpdate, + }); + } + + createCommit(options: CreateCommitOptions): CommitBuilder { + const version = this.options.apiVersion ?? API_VERSION; + const baseUrl = this.options.apiBaseUrl ?? GitStorage.getDefaultAPIBaseUrl(this.options.name); + const transport = new FetchCommitTransport({ baseUrl, version }); + const ttl = resolveCommitTtlSeconds(options); + const builderOptions: CreateCommitOptions = { + ...options, + ttl, + }; + const getAuthToken = () => + this.generateJWT(this.id, { + permissions: ['git:write'], + ttl, + }); + + return createCommitBuilder({ + options: builderOptions, + getAuthToken, + transport, + }); + } + + async createCommitFromDiff(options: CreateCommitFromDiffOptions): Promise { + const version = this.options.apiVersion ?? API_VERSION; + const baseUrl = this.options.apiBaseUrl ?? 
GitStorage.getDefaultAPIBaseUrl(this.options.name); + const transport = new FetchDiffCommitTransport({ baseUrl, version }); + const ttl = resolveCommitTtlSeconds(options); + const requestOptions: CreateCommitFromDiffOptions = { + ...options, + ttl, + }; + const getAuthToken = () => + this.generateJWT(this.id, { + permissions: ['git:write'], + ttl, + }); + + return sendCommitFromDiff({ + options: requestOptions, + getAuthToken, + transport, + }); + } +} + +export class GitStorage { + private options: GitStorageOptions; + private api: ApiFetcher; + + constructor(options: GitStorageOptions) { + if ( + !options || + options.name === undefined || + options.key === undefined || + options.name === null || + options.key === null + ) { + throw new Error( + 'GitStorage requires a name and key. Please check your configuration and try again.', + ); + } + + if (typeof options.name !== 'string' || options.name.trim() === '') { + throw new Error('GitStorage name must be a non-empty string.'); + } + + if (typeof options.key !== 'string' || options.key.trim() === '') { + throw new Error('GitStorage key must be a non-empty string.'); + } + + const resolvedApiBaseUrl = options.apiBaseUrl ?? GitStorage.getDefaultAPIBaseUrl(options.name); + const resolvedApiVersion = options.apiVersion ?? API_VERSION; + const resolvedStorageBaseUrl = + options.storageBaseUrl ?? 
GitStorage.getDefaultStorageBaseUrl(options.name); + const resolvedDefaultTtl = options.defaultTTL; + + this.api = getApiInstance(resolvedApiBaseUrl, resolvedApiVersion); + + this.options = { + key: options.key, + name: options.name, + apiBaseUrl: resolvedApiBaseUrl, + apiVersion: resolvedApiVersion, + storageBaseUrl: resolvedStorageBaseUrl, + defaultTTL: resolvedDefaultTtl, + }; + } + + static getDefaultAPIBaseUrl(name: string): string { + return API_BASE_URL.replace('{{org}}', name); + } + + static getDefaultStorageBaseUrl(name: string): string { + return STORAGE_BASE_URL.replace('{{org}}', name); + } + + /** + * Create a new repository + * @returns The created repository + */ + async createRepo(options?: CreateRepoOptions): Promise { + const repoId = options?.id || crypto.randomUUID(); + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(repoId, { + permissions: ['repo:write'], + ttl, + }); + + const baseRepo = options?.baseRepo; + const isFork = baseRepo ? 'id' in baseRepo : false; + let baseRepoOptions: Record | null = null; + let resolvedDefaultBranch: string | undefined; + + if (baseRepo) { + if ('id' in baseRepo) { + const baseRepoToken = await this.generateJWT(baseRepo.id, { + permissions: ['git:read'], + ttl, + }); + baseRepoOptions = { + provider: 'code', + owner: this.options.name, + name: baseRepo.id, + operation: 'fork', + auth: { token: baseRepoToken }, + ...(baseRepo.ref ? { ref: baseRepo.ref } : {}), + ...(baseRepo.sha ? 
{ sha: baseRepo.sha } : {}), + }; + } else { + baseRepoOptions = { + provider: 'github', + ...snakecaseKeys(baseRepo as unknown as Record), + }; + resolvedDefaultBranch = baseRepo.defaultBranch; + } + } + + // Match backend priority: baseRepo.defaultBranch > options.defaultBranch > 'main' + if (!resolvedDefaultBranch) { + if (options?.defaultBranch) { + resolvedDefaultBranch = options.defaultBranch; + } else if (!isFork) { + resolvedDefaultBranch = 'main'; + } + } + + const createRepoPath = + baseRepoOptions || resolvedDefaultBranch + ? { + path: 'repos', + body: { + ...(baseRepoOptions && { base_repo: baseRepoOptions }), + ...(resolvedDefaultBranch && { default_branch: resolvedDefaultBranch }), + }, + } + : 'repos'; + + // Allow 409 so we can map it to a clearer error message + const resp = await this.api.post(createRepoPath, jwt, { allowedStatus: [409] }); + if (resp.status === 409) { + throw new Error('Repository already exists'); + } + + return new RepoImpl( + repoId, + resolvedDefaultBranch ?? 'main', + this.options, + this.generateJWT.bind(this), + ); + } + + /** + * List repositories for the authenticated organization + * @returns Paginated repositories list + */ + async listRepos(options?: ListReposOptions): Promise { + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT('org', { + permissions: ['org:read'], + ttl, + }); + + let params: Record | undefined; + if (options?.cursor || typeof options?.limit === 'number') { + params = {}; + if (options.cursor) { + params.cursor = options.cursor; + } + if (typeof options.limit === 'number') { + params.limit = options.limit.toString(); + } + } + + const response = await this.api.get({ path: 'repos', params }, jwt); + const raw = listReposResponseSchema.parse(await response.json()); + return transformListReposResult({ + ...raw, + next_cursor: raw.next_cursor ?? 
undefined, + }); + } + + /** + * Find a repository by ID + * @param options The search options + * @returns The found repository + */ + async findOne(options: FindOneOptions): Promise { + const jwt = await this.generateJWT(options.id, { + permissions: ['git:read'], + ttl: DEFAULT_TOKEN_TTL_SECONDS, + }); + + // Allow 404 to indicate "not found" without throwing + const resp = await this.api.get('repo', jwt, { allowedStatus: [404] }); + if (resp.status === 404) { + return null; + } + const body = (await resp.json()) as { default_branch?: string }; + const defaultBranch = body.default_branch ?? 'main'; + return new RepoImpl(options.id, defaultBranch, this.options, this.generateJWT.bind(this)); + } + + /** + * Delete a repository by ID + * @param options The delete options containing the repo ID + * @returns The deletion result + */ + async deleteRepo(options: DeleteRepoOptions): Promise { + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(options.id, { + permissions: ['repo:write'], + ttl, + }); + + // Allow 404 and 409 for clearer error handling + const resp = await this.api.delete('repos/delete', jwt, { allowedStatus: [404, 409] }); + if (resp.status === 404) { + throw new Error('Repository not found'); + } + if (resp.status === 409) { + throw new Error('Repository already deleted'); + } + + const body = (await resp.json()) as { repo_id: string; message: string }; + return { + repoId: body.repo_id, + message: body.message, + }; + } + + /** + * Get the current configuration + * @returns The client configuration + */ + getConfig(): GitStorageOptions { + return { ...this.options }; + } + + /** + * Generate a JWT token for git storage URL authentication + * @private + */ + private async generateJWT(repoId: string, options?: GetRemoteURLOptions): Promise { + // Default permissions and TTL + const permissions = options?.permissions || ['git:write', 'git:read']; + const ttl = 
resolveInvocationTtlSeconds(options, this.options.defaultTTL ?? 365 * 24 * 60 * 60); + + // Create the JWT payload + const now = Math.floor(Date.now() / 1000); + const payload = { + iss: this.options.name, + sub: '@pierre/storage', + repo: repoId, + scopes: permissions, + iat: now, + exp: now + ttl, + }; + + // Import the ES256 (ECDSA P-256) private key from its PKCS#8 PEM string + // NOTE: signing is asymmetric (ES256), not a symmetric HMAC secret + const key = await importPKCS8(this.options.key, 'ES256'); + // Sign and serialize the JWT with the ES256 private key + const jwt = await new SignJWT(payload) + .setProtectedHeader({ alg: 'ES256', typ: 'JWT' }) + .sign(key); + + return jwt; + } +} + +// Export a default client factory +export function createClient(options: GitStorageOptions): GitStorage { + return new GitStorage(options); +} + +// Export CodeStorage as an alias for GitStorage +export { GitStorage as CodeStorage }; + +// Type alias for backward compatibility +export type StorageOptions = GitStorageOptions; diff --git a/packages/git-storage-sdk-node/src/schemas.ts b/packages/git-storage-sdk-node/src/schemas.ts new file mode 100644 index 000000000..5a7989f02 --- /dev/null +++ b/packages/git-storage-sdk-node/src/schemas.ts @@ -0,0 +1,217 @@ +import { z } from 'zod'; + +export const listFilesResponseSchema = z.object({ + paths: z.array(z.string()), + ref: z.string(), +}); + +export const branchInfoSchema = z.object({ + cursor: z.string(), + name: z.string(), + head_sha: z.string(), + created_at: z.string(), +}); + +export const listBranchesResponseSchema = z.object({ + branches: z.array(branchInfoSchema), + next_cursor: z.string().nullable().optional(), + has_more: z.boolean(), +}); + +export const commitInfoRawSchema = z.object({ + sha: z.string(), + message: z.string(), + author_name: z.string(), + author_email: z.string(), + committer_name: z.string(), + committer_email: z.string(), + date: z.string(), +}); + +export const listCommitsResponseSchema = z.object({ + commits: z.array(commitInfoRawSchema), + next_cursor: 
z.string().nullable().optional(), + has_more: z.boolean(), +}); + +export const repoBaseInfoSchema = z.object({ + provider: z.string(), + owner: z.string(), + name: z.string(), +}); + +export const repoInfoSchema = z.object({ + repo_id: z.string(), + url: z.string(), + default_branch: z.string(), + created_at: z.string(), + base_repo: repoBaseInfoSchema.optional().nullable(), +}); + +export const listReposResponseSchema = z.object({ + repos: z.array(repoInfoSchema), + next_cursor: z.string().nullable().optional(), + has_more: z.boolean(), +}); + +export const noteReadResponseSchema = z.object({ + sha: z.string(), + note: z.string(), + ref_sha: z.string(), +}); + +export const noteResultSchema = z.object({ + success: z.boolean(), + status: z.string(), + message: z.string().optional(), +}); + +export const noteWriteResponseSchema = z.object({ + sha: z.string(), + target_ref: z.string(), + base_commit: z.string().optional(), + new_ref_sha: z.string(), + result: noteResultSchema, +}); + +export const diffStatsSchema = z.object({ + files: z.number(), + additions: z.number(), + deletions: z.number(), + changes: z.number(), +}); + +export const diffFileRawSchema = z.object({ + path: z.string(), + state: z.string(), + old_path: z.string().nullable().optional(), + raw: z.string(), + bytes: z.number(), + is_eof: z.boolean(), +}); + +export const filteredFileRawSchema = z.object({ + path: z.string(), + state: z.string(), + old_path: z.string().nullable().optional(), + bytes: z.number(), + is_eof: z.boolean(), +}); + +export const branchDiffResponseSchema = z.object({ + branch: z.string(), + base: z.string(), + stats: diffStatsSchema, + files: z.array(diffFileRawSchema), + filtered_files: z.array(filteredFileRawSchema), +}); + +export const commitDiffResponseSchema = z.object({ + sha: z.string(), + stats: diffStatsSchema, + files: z.array(diffFileRawSchema), + filtered_files: z.array(filteredFileRawSchema), +}); + +export const createBranchResponseSchema = z.object({ + 
message: z.string(), + target_branch: z.string(), + target_is_ephemeral: z.boolean(), + commit_sha: z.string().nullable().optional(), +}); + +export const refUpdateResultSchema = z.object({ + branch: z.string(), + old_sha: z.string(), + new_sha: z.string(), + success: z.boolean(), + status: z.string(), + message: z.string().optional(), +}); + +export const commitPackCommitSchema = z.object({ + commit_sha: z.string(), + tree_sha: z.string(), + target_branch: z.string(), + pack_bytes: z.number(), + blob_count: z.number(), +}); + +export const restoreCommitCommitSchema = commitPackCommitSchema.omit({ blob_count: true }); + +export const refUpdateResultWithOptionalsSchema = z.object({ + branch: z.string().optional(), + old_sha: z.string().optional(), + new_sha: z.string().optional(), + success: z.boolean().optional(), + status: z.string(), + message: z.string().optional(), +}); + +export const commitPackAckSchema = z.object({ + commit: commitPackCommitSchema, + result: refUpdateResultSchema, +}); + +export const restoreCommitAckSchema = z.object({ + commit: restoreCommitCommitSchema, + result: refUpdateResultSchema.extend({ success: z.literal(true) }), +}); + +export const commitPackResponseSchema = z.object({ + commit: commitPackCommitSchema.partial().optional().nullable(), + result: refUpdateResultWithOptionalsSchema, +}); + +export const restoreCommitResponseSchema = z.object({ + commit: restoreCommitCommitSchema.partial().optional().nullable(), + result: refUpdateResultWithOptionalsSchema, +}); + +export const grepLineSchema = z.object({ + line_number: z.number(), + text: z.string(), + type: z.string(), +}); + +export const grepFileMatchSchema = z.object({ + path: z.string(), + lines: z.array(grepLineSchema), +}); + +export const grepResponseSchema = z.object({ + query: z.object({ + pattern: z.string(), + case_sensitive: z.boolean(), + }), + repo: z.object({ + ref: z.string(), + commit: z.string(), + }), + matches: z.array(grepFileMatchSchema), + next_cursor: 
z.string().nullable().optional(), + has_more: z.boolean(), +}); + +export const errorEnvelopeSchema = z.object({ + error: z.string(), +}); + +export type ListFilesResponseRaw = z.infer; +export type RawBranchInfo = z.infer; +export type ListBranchesResponseRaw = z.infer; +export type RawCommitInfo = z.infer; +export type ListCommitsResponseRaw = z.infer; +export type RawRepoBaseInfo = z.infer; +export type RawRepoInfo = z.infer; +export type ListReposResponseRaw = z.infer; +export type NoteReadResponseRaw = z.infer; +export type NoteWriteResponseRaw = z.infer; +export type RawFileDiff = z.infer; +export type RawFilteredFile = z.infer; +export type GetBranchDiffResponseRaw = z.infer; +export type GetCommitDiffResponseRaw = z.infer; +export type CreateBranchResponseRaw = z.infer; +export type CommitPackAckRaw = z.infer; +export type RestoreCommitAckRaw = z.infer; +export type GrepResponseRaw = z.infer; diff --git a/packages/git-storage-sdk-node/src/stream-utils.ts b/packages/git-storage-sdk-node/src/stream-utils.ts new file mode 100644 index 000000000..af4078432 --- /dev/null +++ b/packages/git-storage-sdk-node/src/stream-utils.ts @@ -0,0 +1,255 @@ +import type { BlobLike, FileLike, ReadableStreamLike } from './types'; + +type NodeBuffer = Uint8Array & { toString(encoding?: string): string }; +interface NodeBufferConstructor { + from(data: Uint8Array): NodeBuffer; + from(data: string, encoding?: string): NodeBuffer; + isBuffer(value: unknown): value is NodeBuffer; +} + +const BufferCtor: NodeBufferConstructor | undefined = ( + globalThis as { Buffer?: NodeBufferConstructor } +).Buffer; + +export const MAX_CHUNK_BYTES = 4 * 1024 * 1024; + +export type ChunkSegment = { + chunk: Uint8Array; + eof: boolean; +}; + +export async function* chunkify(source: AsyncIterable): AsyncIterable { + let pending: Uint8Array | null = null; + let produced = false; + + for await (const value of source) { + const bytes = value; + + if (pending && pending.byteLength === MAX_CHUNK_BYTES) { 
+ yield { chunk: pending, eof: false }; + produced = true; + pending = null; + } + + const merged: Uint8Array = pending ? concatChunks(pending, bytes) : bytes; + pending = null; + + let cursor: Uint8Array = merged; + while (cursor.byteLength > MAX_CHUNK_BYTES) { + const chunk: Uint8Array = cursor.slice(0, MAX_CHUNK_BYTES); + cursor = cursor.slice(MAX_CHUNK_BYTES); + yield { chunk, eof: false }; + produced = true; + } + + pending = cursor; + } + + if (pending) { + yield { chunk: pending, eof: true }; + produced = true; + } + + if (!produced) { + yield { chunk: new Uint8Array(0), eof: true }; + } +} + +export async function* toAsyncIterable( + source: + | string + | Uint8Array + | ArrayBuffer + | BlobLike + | FileLike + | ReadableStreamLike + | AsyncIterable + | Iterable, +): AsyncIterable { + if (typeof source === 'string') { + yield new TextEncoder().encode(source); + return; + } + if (source instanceof Uint8Array) { + yield source; + return; + } + if (source instanceof ArrayBuffer) { + yield new Uint8Array(source); + return; + } + if (ArrayBuffer.isView(source)) { + yield new Uint8Array(source.buffer, source.byteOffset, source.byteLength); + return; + } + if (isBlobLike(source)) { + const stream = source.stream(); + if (isAsyncIterable(stream)) { + for await (const chunk of stream as AsyncIterable) { + yield ensureUint8Array(chunk); + } + return; + } + if (isReadableStreamLike(stream)) { + yield* readReadableStream(stream); + return; + } + } + if (isReadableStreamLike(source)) { + yield* readReadableStream(source); + return; + } + if (isAsyncIterable(source)) { + for await (const chunk of source as AsyncIterable) { + yield ensureUint8Array(chunk); + } + return; + } + if (isIterable(source)) { + for (const chunk of source as Iterable) { + yield ensureUint8Array(chunk); + } + return; + } + throw new Error('Unsupported content source; expected binary data'); +} + +export function base64Encode(bytes: Uint8Array): string { + if (BufferCtor) { + return 
BufferCtor.from(bytes).toString('base64'); + } + let binary = ''; + for (let i = 0; i < bytes.byteLength; i++) { + binary += String.fromCharCode(bytes[i]); + } + const btoaFn = (globalThis as { btoa?: (data: string) => string }).btoa; + if (typeof btoaFn === 'function') { + return btoaFn(binary); + } + throw new Error('Base64 encoding is not supported in this environment'); +} + +export function requiresDuplex(body: unknown): boolean { + if (!body || typeof body !== 'object') { + return false; + } + + if (typeof (body as { [Symbol.asyncIterator]?: unknown })[Symbol.asyncIterator] === 'function') { + return true; + } + + const readableStreamCtor = ( + globalThis as { + ReadableStream?: new (...args: unknown[]) => unknown; + } + ).ReadableStream; + if (readableStreamCtor && body instanceof readableStreamCtor) { + return true; + } + + return false; +} + +export function toRequestBody(iterable: AsyncIterable): unknown { + const readableStreamCtor = ( + globalThis as { ReadableStream?: new (underlyingSource: unknown) => unknown } + ).ReadableStream; + if (typeof readableStreamCtor === 'function') { + const iterator = iterable[Symbol.asyncIterator](); + return new readableStreamCtor({ + async pull(controller: { enqueue(chunk: Uint8Array): void; close(): void }) { + const { value, done } = await iterator.next(); + if (done) { + controller.close(); + return; + } + controller.enqueue(value!); + }, + async cancel(reason: unknown) { + if (typeof iterator.return === 'function') { + await iterator.return(reason); + } + }, + }); + } + return iterable; +} + +async function* readReadableStream(stream: ReadableStreamLike): AsyncIterable { + const reader = stream.getReader(); + try { + while (true) { + const { value, done } = await reader.read(); + if (done) { + break; + } + if (value !== undefined) { + yield ensureUint8Array(value); + } + } + } finally { + reader.releaseLock?.(); + } +} + +function ensureUint8Array(value: unknown): Uint8Array { + if (value instanceof Uint8Array) { 
+ return value; + } + if (value instanceof ArrayBuffer) { + return new Uint8Array(value); + } + if (ArrayBuffer.isView(value)) { + return new Uint8Array(value.buffer, value.byteOffset, value.byteLength); + } + if (typeof value === 'string') { + return new TextEncoder().encode(value); + } + if (BufferCtor && BufferCtor.isBuffer(value)) { + return value as Uint8Array; + } + throw new Error('Unsupported chunk type; expected binary data'); +} + +function isBlobLike(value: unknown): value is BlobLike { + return ( + typeof value === 'object' && value !== null && typeof (value as BlobLike).stream === 'function' + ); +} + +function isReadableStreamLike(value: unknown): value is ReadableStreamLike { + return ( + typeof value === 'object' && + value !== null && + typeof (value as ReadableStreamLike).getReader === 'function' + ); +} + +function isAsyncIterable(value: unknown): value is AsyncIterable { + return ( + typeof value === 'object' && + value !== null && + Symbol.asyncIterator in (value as Record) + ); +} + +function isIterable(value: unknown): value is Iterable { + return ( + typeof value === 'object' && + value !== null && + Symbol.iterator in (value as Record) + ); +} + +function concatChunks(a: Uint8Array, b: Uint8Array): Uint8Array { + if (a.byteLength === 0) { + return b; + } + if (b.byteLength === 0) { + return a; + } + const merged = new Uint8Array(a.byteLength + b.byteLength); + merged.set(a, 0); + merged.set(b, a.byteLength); + return merged; +} diff --git a/packages/git-storage-sdk-node/src/types.ts b/packages/git-storage-sdk-node/src/types.ts new file mode 100644 index 000000000..3e28743fd --- /dev/null +++ b/packages/git-storage-sdk-node/src/types.ts @@ -0,0 +1,634 @@ +/** + * Type definitions for Pierre Git Storage SDK + */ + +import type { + CreateBranchResponseRaw, + GetBranchDiffResponseRaw, + GetCommitDiffResponseRaw, + ListBranchesResponseRaw, + ListCommitsResponseRaw, + ListFilesResponseRaw, + ListReposResponseRaw, + NoteReadResponseRaw, + 
NoteWriteResponseRaw, + RawBranchInfo as SchemaRawBranchInfo, + RawCommitInfo as SchemaRawCommitInfo, + RawFileDiff as SchemaRawFileDiff, + RawFilteredFile as SchemaRawFilteredFile, + RawRepoBaseInfo as SchemaRawRepoBaseInfo, + RawRepoInfo as SchemaRawRepoInfo, +} from './schemas'; + +export interface OverrideableGitStorageOptions { + apiBaseUrl?: string; + storageBaseUrl?: string; + apiVersion?: ValidAPIVersion; + defaultTTL?: number; +} + +export interface GitStorageOptions extends OverrideableGitStorageOptions { + key: string; + name: string; + defaultTTL?: number; +} + +export type ValidAPIVersion = 1; + +export interface GetRemoteURLOptions { + permissions?: ('git:write' | 'git:read' | 'repo:write' | 'org:read')[]; + ttl?: number; +} + +export interface Repo { + id: string; + defaultBranch: string; + getRemoteURL(options?: GetRemoteURLOptions): Promise; + getEphemeralRemoteURL(options?: GetRemoteURLOptions): Promise; + + getFileStream(options: GetFileOptions): Promise; + listFiles(options?: ListFilesOptions): Promise; + listBranches(options?: ListBranchesOptions): Promise; + listCommits(options?: ListCommitsOptions): Promise; + getNote(options: GetNoteOptions): Promise; + createNote(options: CreateNoteOptions): Promise; + appendNote(options: AppendNoteOptions): Promise; + deleteNote(options: DeleteNoteOptions): Promise; + getBranchDiff(options: GetBranchDiffOptions): Promise; + getCommitDiff(options: GetCommitDiffOptions): Promise; + grep(options: GrepOptions): Promise; + pullUpstream(options?: PullUpstreamOptions): Promise; + restoreCommit(options: RestoreCommitOptions): Promise; + createBranch(options: CreateBranchOptions): Promise; + createCommit(options: CreateCommitOptions): CommitBuilder; + createCommitFromDiff(options: CreateCommitFromDiffOptions): Promise; +} + +export type ValidMethod = 'GET' | 'POST' | 'PUT' | 'DELETE'; +type SimplePath = string; +type ComplexPath = { + path: string; + params?: Record; + body?: Record; +}; +export type ValidPath = 
SimplePath | ComplexPath; + +interface GitStorageInvocationOptions { + ttl?: number; +} + +export interface FindOneOptions { + id: string; +} + +export type SupportedRepoProvider = 'github'; + +export interface GitHubBaseRepo { + /** + * @default github + */ + provider?: SupportedRepoProvider; + owner: string; + name: string; + defaultBranch?: string; +} + +export interface ForkBaseRepo { + id: string; + ref?: string; + sha?: string; +} + +export type BaseRepo = GitHubBaseRepo | ForkBaseRepo; + +export interface ListReposOptions extends GitStorageInvocationOptions { + cursor?: string; + limit?: number; +} + +export type RawRepoBaseInfo = SchemaRawRepoBaseInfo; + +export interface RepoBaseInfo { + provider: string; + owner: string; + name: string; +} + +export type RawRepoInfo = SchemaRawRepoInfo; + +export interface RepoInfo { + repoId: string; + url: string; + defaultBranch: string; + createdAt: string; + baseRepo?: RepoBaseInfo; +} + +export type ListReposResponse = ListReposResponseRaw; + +export interface ListReposResult { + repos: RepoInfo[]; + nextCursor?: string; + hasMore: boolean; +} + +export interface CreateRepoOptions extends GitStorageInvocationOptions { + id?: string; + baseRepo?: BaseRepo; + defaultBranch?: string; +} + +export interface DeleteRepoOptions extends GitStorageInvocationOptions { + id: string; +} + +export interface DeleteRepoResult { + repoId: string; + message: string; +} + +// Get File API types +export interface GetFileOptions extends GitStorageInvocationOptions { + path: string; + ref?: string; + ephemeral?: boolean; + ephemeralBase?: boolean; +} + +export interface PullUpstreamOptions extends GitStorageInvocationOptions { + ref?: string; +} + +// List Files API types +export interface ListFilesOptions extends GitStorageInvocationOptions { + ref?: string; + ephemeral?: boolean; +} + +export type ListFilesResponse = ListFilesResponseRaw; + +export interface ListFilesResult { + paths: string[]; + ref: string; +} + +// List Branches 
API types +export interface ListBranchesOptions extends GitStorageInvocationOptions { + cursor?: string; + limit?: number; +} + +export type RawBranchInfo = SchemaRawBranchInfo; + +export interface BranchInfo { + cursor: string; + name: string; + headSha: string; + createdAt: string; +} + +export type ListBranchesResponse = ListBranchesResponseRaw; + +export interface ListBranchesResult { + branches: BranchInfo[]; + nextCursor?: string; + hasMore: boolean; +} + +// Create Branch API types +export interface CreateBranchOptions extends GitStorageInvocationOptions { + baseBranch: string; + targetBranch: string; + baseIsEphemeral?: boolean; + targetIsEphemeral?: boolean; +} + +export type CreateBranchResponse = CreateBranchResponseRaw; + +export interface CreateBranchResult { + message: string; + targetBranch: string; + targetIsEphemeral: boolean; + commitSha?: string; +} + +// List Commits API types +export interface ListCommitsOptions extends GitStorageInvocationOptions { + branch?: string; + cursor?: string; + limit?: number; +} + +export type RawCommitInfo = SchemaRawCommitInfo; + +export interface CommitInfo { + sha: string; + message: string; + authorName: string; + authorEmail: string; + committerName: string; + committerEmail: string; + date: Date; + rawDate: string; +} + +export type ListCommitsResponse = ListCommitsResponseRaw; + +export interface ListCommitsResult { + commits: CommitInfo[]; + nextCursor?: string; + hasMore: boolean; +} + +// Git notes API types +export interface GetNoteOptions extends GitStorageInvocationOptions { + sha: string; +} + +export type GetNoteResponse = NoteReadResponseRaw; + +export interface GetNoteResult { + sha: string; + note: string; + refSha: string; +} + +interface NoteWriteBaseOptions extends GitStorageInvocationOptions { + sha: string; + note: string; + expectedRefSha?: string; + author?: CommitSignature; +} + +export type CreateNoteOptions = NoteWriteBaseOptions; + +export type AppendNoteOptions = NoteWriteBaseOptions; 
+ +export interface DeleteNoteOptions extends GitStorageInvocationOptions { + sha: string; + expectedRefSha?: string; + author?: CommitSignature; +} + +export interface NoteWriteResultPayload { + success: boolean; + status: string; + message?: string; +} + +export type NoteWriteResponse = NoteWriteResponseRaw; + +export interface NoteWriteResult { + sha: string; + targetRef: string; + baseCommit?: string; + newRefSha: string; + result: NoteWriteResultPayload; +} + +// Branch Diff API types +export interface GetBranchDiffOptions extends GitStorageInvocationOptions { + branch: string; + base?: string; + ephemeral?: boolean; + ephemeralBase?: boolean; + /** Optional paths to filter the diff to specific files */ + paths?: string[]; +} + +export type GetBranchDiffResponse = GetBranchDiffResponseRaw; + +export interface GetBranchDiffResult { + branch: string; + base: string; + stats: DiffStats; + files: FileDiff[]; + filteredFiles: FilteredFile[]; +} + +// Commit Diff API types +export interface GetCommitDiffOptions extends GitStorageInvocationOptions { + sha: string; + baseSha?: string; + /** Optional paths to filter the diff to specific files */ + paths?: string[]; +} + +export type GetCommitDiffResponse = GetCommitDiffResponseRaw; + +export interface GetCommitDiffResult { + sha: string; + stats: DiffStats; + files: FileDiff[]; + filteredFiles: FilteredFile[]; +} + +// Grep API types +export interface GrepOptions extends GitStorageInvocationOptions { + ref?: string; + paths?: string[]; + query: { + pattern: string; + /** + * Default is case-sensitive. + * When omitted, the server default is used. 
+ */ + caseSensitive?: boolean; + }; + fileFilters?: { + includeGlobs?: string[]; + excludeGlobs?: string[]; + extensionFilters?: string[]; + }; + context?: { + before?: number; + after?: number; + }; + limits?: { + maxLines?: number; + maxMatchesPerFile?: number; + }; + pagination?: { + cursor?: string; + limit?: number; + }; +} + +export interface GrepLine { + lineNumber: number; + text: string; + type: string; +} + +export interface GrepFileMatch { + path: string; + lines: GrepLine[]; +} + +export interface GrepResult { + query: { + pattern: string; + caseSensitive: boolean; + }; + repo: { + ref: string; + commit: string; + }; + matches: GrepFileMatch[]; + nextCursor?: string; + hasMore: boolean; +} + +// Shared diff types +export interface DiffStats { + files: number; + additions: number; + deletions: number; + changes: number; +} + +export type RawFileDiff = SchemaRawFileDiff; + +export type RawFilteredFile = SchemaRawFilteredFile; + +export type DiffFileState = + | 'added' + | 'modified' + | 'deleted' + | 'renamed' + | 'copied' + | 'type_changed' + | 'unmerged' + | 'unknown'; + +export interface DiffFileBase { + path: string; + state: DiffFileState; + rawState: string; + oldPath?: string; + bytes: number; + isEof: boolean; +} + +export interface FileDiff extends DiffFileBase { + raw: string; +} + +export interface FilteredFile extends DiffFileBase {} + +interface CreateCommitBaseOptions extends GitStorageInvocationOptions { + commitMessage: string; + expectedHeadSha?: string; + baseBranch?: string; + ephemeral?: boolean; + ephemeralBase?: boolean; + author: CommitSignature; + committer?: CommitSignature; + signal?: AbortSignal; +} + +export interface CreateCommitBranchOptions extends CreateCommitBaseOptions { + targetBranch: string; + targetRef?: never; +} + +/** + * @deprecated Use {@link CreateCommitBranchOptions} instead. 
+ */ +export interface LegacyCreateCommitOptions extends CreateCommitBaseOptions { + targetBranch?: never; + targetRef: string; +} + +export type CreateCommitOptions = CreateCommitBranchOptions | LegacyCreateCommitOptions; + +export interface CommitSignature { + name: string; + email: string; +} + +export interface ReadableStreamReaderLike { + read(): Promise<{ value?: T; done: boolean }>; + releaseLock?(): void; +} + +export interface ReadableStreamLike { + getReader(): ReadableStreamReaderLike; +} + +export interface BlobLike { + stream(): unknown; +} + +export interface FileLike extends BlobLike { + name: string; + lastModified?: number; +} + +export type GitFileMode = '100644' | '100755' | '120000' | '160000'; + +export type TextEncoding = + | 'ascii' + | 'utf8' + | 'utf-8' + | 'utf16le' + | 'utf-16le' + | 'ucs2' + | 'ucs-2' + | 'base64' + | 'base64url' + | 'latin1' + | 'binary' + | 'hex'; + +export type CommitFileSource = + | string + | Uint8Array + | ArrayBuffer + | BlobLike + | FileLike + | ReadableStreamLike + | AsyncIterable + | Iterable; + +export interface CommitFileOptions { + mode?: GitFileMode; +} + +export interface CommitTextFileOptions extends CommitFileOptions { + encoding?: TextEncoding; +} + +export interface CommitBuilder { + addFile(path: string, source: CommitFileSource, options?: CommitFileOptions): CommitBuilder; + addFileFromString(path: string, contents: string, options?: CommitTextFileOptions): CommitBuilder; + deletePath(path: string): CommitBuilder; + send(): Promise; +} + +export type DiffSource = CommitFileSource; + +export interface CreateCommitFromDiffOptions extends GitStorageInvocationOptions { + targetBranch: string; + commitMessage: string; + diff: DiffSource; + expectedHeadSha?: string; + baseBranch?: string; + ephemeral?: boolean; + ephemeralBase?: boolean; + author: CommitSignature; + committer?: CommitSignature; + signal?: AbortSignal; +} + +export interface RefUpdate { + branch: string; + oldSha: string; + newSha: string; 
+} + +export type RefUpdateReason = + | 'precondition_failed' + | 'conflict' + | 'not_found' + | 'invalid' + | 'timeout' + | 'unauthorized' + | 'forbidden' + | 'unavailable' + | 'internal' + | 'failed' + | 'unknown'; + +export interface CommitResult { + commitSha: string; + treeSha: string; + targetBranch: string; + packBytes: number; + blobCount: number; + refUpdate: RefUpdate; +} + +export interface RestoreCommitOptions extends GitStorageInvocationOptions { + targetBranch: string; + targetCommitSha: string; + commitMessage?: string; + expectedHeadSha?: string; + author: CommitSignature; + committer?: CommitSignature; +} + +export interface RestoreCommitResult { + commitSha: string; + treeSha: string; + targetBranch: string; + packBytes: number; + refUpdate: RefUpdate; +} + +// Webhook types +export interface WebhookValidationOptions { + /** + * Maximum age of webhook in seconds (default: 300 seconds / 5 minutes) + * Set to 0 to disable timestamp validation + */ + maxAgeSeconds?: number; +} + +export interface WebhookValidationResult { + /** + * Whether the webhook signature and timestamp are valid + */ + valid: boolean; + /** + * Error message if validation failed + */ + error?: string; + /** + * The parsed webhook event type (e.g., "push") + */ + eventType?: string; + /** + * The timestamp from the signature (Unix seconds) + */ + timestamp?: number; +} + +// Webhook event payloads +export interface RawWebhookPushEvent { + repository: { + id: string; + url: string; + }; + ref: string; + before: string; + after: string; + customer_id: string; + pushed_at: string; // RFC3339 timestamp +} + +export interface WebhookPushEvent { + type: 'push'; + repository: { + id: string; + url: string; + }; + ref: string; + before: string; + after: string; + customerId: string; + pushedAt: Date; + rawPushedAt: string; +} + +export interface WebhookUnknownEvent { + type: string; + raw: unknown; +} + +export type WebhookEventPayload = WebhookPushEvent | WebhookUnknownEvent; + +export 
interface ParsedWebhookSignature {
  // Raw (unparsed) Unix-seconds timestamp from the "t=" element.
  timestamp: string;
  // Hex-encoded HMAC-SHA256 from the "sha256=" element.
  signature: string;
}
diff --git a/packages/git-storage-sdk-node/src/util.ts b/packages/git-storage-sdk-node/src/util.ts
new file mode 100644
index 000000000..0586ba11f
--- /dev/null
+++ b/packages/git-storage-sdk-node/src/util.ts
@@ -0,0 +1,44 @@
// Constant-time byte comparison used for signature checks. The early return
// on a length mismatch reveals only the length, which is standard practice
// (Node's crypto.timingSafeEqual throws in that case instead).
export function timingSafeEqual(a: string | Uint8Array, b: string | Uint8Array): boolean {
  const bufferA = typeof a === 'string' ? new TextEncoder().encode(a) : a;
  const bufferB = typeof b === 'string' ? new TextEncoder().encode(b) : b;

  if (bufferA.length !== bufferB.length) return false;

  // Accumulate XOR differences so the loop cost is independent of where
  // (or whether) the inputs differ.
  let result = 0;
  for (let i = 0; i < bufferA.length; i++) {
    result |= bufferA[i] ^ bufferB[i];
  }
  return result === 0;
}

// Returns the WebCrypto implementation: globalThis.crypto where available
// (browsers, modern Node), otherwise Node's webcrypto shim.
export async function getEnvironmentCrypto() {
  if (!globalThis.crypto) {
    const { webcrypto } = await import('node:crypto');
    return webcrypto;
  }
  return globalThis.crypto;
}

// Computes HMAC-SHA256 over `data` with `secret` and returns it hex-encoded.
// NOTE(review): the return type's generic argument appears stripped in this
// patch — presumably Promise<string>; confirm against the repository source.
export async function createHmac(algorithm: string, secret: string, data: string): Promise {
  if (algorithm !== 'sha256') {
    throw new Error('Only sha256 algorithm is supported');
  }
  if (!secret || secret.length === 0) {
    throw new Error('Secret is required');
  }

  const crypto = await getEnvironmentCrypto();
  const encoder = new TextEncoder();
  // Non-extractable key used only for this one sign operation.
  const key = await crypto.subtle.importKey(
    'raw',
    encoder.encode(secret),
    { name: 'HMAC', hash: 'SHA-256' },
    false,
    ['sign'],
  );

  const signature = await crypto.subtle.sign('HMAC', key, encoder.encode(data));
  // Hex-encode the raw signature bytes.
  return Array.from(new Uint8Array(signature))
    .map((b) => b.toString(16).padStart(2, '0'))
    .join('');
}
diff --git a/packages/git-storage-sdk-node/src/version.ts b/packages/git-storage-sdk-node/src/version.ts
new file mode 100644
index 000000000..959ccdc45
--- /dev/null
+++ b/packages/git-storage-sdk-node/src/version.ts
@@ -0,0 +1,8 @@
import packageJson from '../package.json';

export const PACKAGE_NAME = 'code-storage-sdk';
export const PACKAGE_VERSION =
packageJson.version; + +export function getUserAgent(): string { + return `${PACKAGE_NAME}/${PACKAGE_VERSION}`; +} diff --git a/packages/git-storage-sdk-node/src/webhook.ts b/packages/git-storage-sdk-node/src/webhook.ts new file mode 100644 index 000000000..1c9a99493 --- /dev/null +++ b/packages/git-storage-sdk-node/src/webhook.ts @@ -0,0 +1,323 @@ +/** + * Webhook validation utilities for Pierre Git Storage + */ + +import type { + ParsedWebhookSignature, + RawWebhookPushEvent, + WebhookEventPayload, + WebhookPushEvent, + WebhookValidationOptions, + WebhookValidationResult, +} from './types'; + +import { createHmac, timingSafeEqual } from './util'; + +const DEFAULT_MAX_AGE_SECONDS = 300; // 5 minutes + +/** + * Parse the X-Pierre-Signature header + * Format: t=,sha256= + */ +export function parseSignatureHeader(header: string): ParsedWebhookSignature | null { + if (!header || typeof header !== 'string') { + return null; + } + + let timestamp = ''; + let signature = ''; + + // Split by comma and parse each element + const elements = header.split(','); + for (const element of elements) { + const trimmedElement = element.trim(); + const parts = trimmedElement.split('=', 2); + if (parts.length !== 2) { + continue; + } + + const [key, value] = parts; + switch (key) { + case 't': + timestamp = value; + break; + case 'sha256': + signature = value; + break; + } + } + + if (!timestamp || !signature) { + return null; + } + + return { timestamp, signature }; +} + +/** + * Validate a webhook signature and timestamp + * + * @param payload - The raw webhook payload (request body) + * @param signatureHeader - The X-Pierre-Signature header value + * @param secret - The webhook secret for HMAC verification + * @param options - Validation options + * @returns Validation result with details + * + * @example + * ```typescript + * const result = await validateWebhookSignature( + * requestBody, + * request.headers['x-pierre-signature'], + * webhookSecret + * ); + * + * if 
(!result.valid) { + * console.error('Invalid webhook:', result.error); + * return; + * } + * ``` + */ +export async function validateWebhookSignature( + payload: string | Buffer, + signatureHeader: string, + secret: string, + options: WebhookValidationOptions = {}, +): Promise { + if (!secret || secret.length === 0) { + return { + valid: false, + error: 'Empty secret is not allowed', + }; + } + + // Parse the signature header + const parsed = parseSignatureHeader(signatureHeader); + if (!parsed) { + return { + valid: false, + error: 'Invalid signature header format', + }; + } + + // Parse timestamp + const timestamp = Number.parseInt(parsed.timestamp, 10); + if (isNaN(timestamp)) { + return { + valid: false, + error: 'Invalid timestamp in signature', + }; + } + + // Validate timestamp age (prevent replay attacks) + const maxAge = options.maxAgeSeconds ?? DEFAULT_MAX_AGE_SECONDS; + if (maxAge > 0) { + const now = Math.floor(Date.now() / 1000); + const age = now - timestamp; + + if (age > maxAge) { + return { + valid: false, + error: `Webhook timestamp too old (${age} seconds)`, + timestamp, + }; + } + + // Also reject timestamps from the future (clock skew tolerance of 60 seconds) + if (age < -60) { + return { + valid: false, + error: 'Webhook timestamp is in the future', + timestamp, + }; + } + } + + // Convert payload to string if it's a Buffer + const payloadStr = typeof payload === 'string' ? payload : payload.toString('utf8'); + + // Compute expected signature + // Format: HMAC-SHA256(secret, timestamp + "." 
+ payload) + const signedData = `${parsed.timestamp}.${payloadStr}`; + const expectedSignature = await createHmac('sha256', secret, signedData); + + // Compare signatures using constant-time comparison + const expectedBuffer = Buffer.from(expectedSignature); + const actualBuffer = Buffer.from(parsed.signature); + + // Ensure both buffers are the same length for timing-safe comparison + if (expectedBuffer.length !== actualBuffer.length) { + return { + valid: false, + error: 'Invalid signature', + timestamp, + }; + } + + const signaturesMatch = timingSafeEqual(expectedBuffer, actualBuffer); + if (!signaturesMatch) { + return { + valid: false, + error: 'Invalid signature', + timestamp, + }; + } + + return { + valid: true, + timestamp, + }; +} + +/** + * Validate a webhook request + * + * This is a convenience function that validates the signature and parses the payload. + * + * @param payload - The raw webhook payload (request body) + * @param headers - The request headers (must include x-pierre-signature and x-pierre-event) + * @param secret - The webhook secret for HMAC verification + * @param options - Validation options + * @returns The parsed webhook payload if valid, or validation error + * + * @example + * ```typescript + * const result = await validateWebhook( + * request.body, + * request.headers, + * process.env.WEBHOOK_SECRET + * ); + * + * if (!result.valid) { + * return new Response('Invalid webhook', { status: 401 }); + * } + * + * // Type-safe access to the webhook payload + * console.log('Push event:', result.payload); + * ``` + */ +export async function validateWebhook( + payload: string | Buffer, + headers: Record, + secret: string, + options: WebhookValidationOptions = {}, +): Promise { + // Get signature header + const signatureHeader = headers['x-pierre-signature'] || headers['X-Pierre-Signature']; + if (!signatureHeader || Array.isArray(signatureHeader)) { + return { + valid: false, + error: 'Missing or invalid X-Pierre-Signature header', + }; + 
  }

  // Get event type header
  const eventType = headers['x-pierre-event'] || headers['X-Pierre-Event'];
  if (!eventType || Array.isArray(eventType)) {
    return {
      valid: false,
      error: 'Missing or invalid X-Pierre-Event header',
    };
  }

  // Validate signature before touching the body contents.
  const validationResult = await validateWebhookSignature(
    payload,
    signatureHeader,
    secret,
    options,
  );

  if (!validationResult.valid) {
    return validationResult;
  }

  // Parse payload
  const payloadStr = typeof payload === 'string' ? payload : payload.toString('utf8');
  let parsedJson: unknown;
  try {
    parsedJson = JSON.parse(payloadStr);
  } catch {
    return {
      valid: false,
      error: 'Invalid JSON payload',
      timestamp: validationResult.timestamp,
    };
  }

  const conversion = convertWebhookPayload(String(eventType), parsedJson);
  if (!conversion.valid) {
    return {
      valid: false,
      error: conversion.error,
      timestamp: validationResult.timestamp,
    };
  }

  return {
    valid: true,
    eventType,
    timestamp: validationResult.timestamp,
    payload: conversion.payload,
  };
}

// Maps a raw JSON body to a typed event. Known event types are validated
// strictly; unknown types pass through as { type, raw } so new server-side
// events don't break existing consumers.
function convertWebhookPayload(
  eventType: string,
  raw: unknown,
): { valid: true; payload: WebhookEventPayload } | { valid: false; error: string } {
  if (eventType === 'push') {
    if (!isRawWebhookPushEvent(raw)) {
      return {
        valid: false,
        error: 'Invalid push payload',
      };
    }
    return {
      valid: true,
      payload: transformPushEvent(raw),
    };
  }
  const fallbackPayload = { type: eventType, raw };
  return {
    valid: true,
    payload: fallbackPayload,
  };
}

// snake_case wire format -> camelCase SDK shape; keeps the raw RFC3339
// string alongside the parsed Date (an unparseable string would yield an
// Invalid Date — NOTE(review): not validated here, confirm that is intended).
function transformPushEvent(raw: RawWebhookPushEvent): WebhookPushEvent {
  return {
    type: 'push' as const,
    repository: {
      id: raw.repository.id,
      url: raw.repository.url,
    },
    ref: raw.ref,
    before: raw.before,
    after: raw.after,
    customerId: raw.customer_id,
    pushedAt: new Date(raw.pushed_at),
    rawPushedAt: raw.pushed_at,
  };
}

// Structural type guard for the push wire format.
function isRawWebhookPushEvent(value: unknown): value is RawWebhookPushEvent {
  if (!isRecord(value)) {
    return false;
  }
  if (!isRecord(value.repository)) {
    return false;
  }
  return (
    typeof value.repository.id === 'string' &&
    typeof value.repository.url === 'string' &&
    typeof value.ref === 'string' &&
    typeof value.before === 'string' &&
    typeof value.after === 'string' &&
    typeof value.customer_id === 'string' &&
    typeof value.pushed_at === 'string'
  );
}

// NOTE(review): generic argument stripped in this patch — presumably
// Record<string, unknown>.
function isRecord(value: unknown): value is Record {
  return typeof value === 'object' && value !== null;
}
diff --git a/packages/git-storage-sdk-node/tests/commit-from-diff.test.ts b/packages/git-storage-sdk-node/tests/commit-from-diff.test.ts
new file mode 100644
index 000000000..21a5ff2a1
--- /dev/null
+++ b/packages/git-storage-sdk-node/tests/commit-from-diff.test.ts
@@ -0,0 +1,343 @@
import { ReadableStream } from 'node:stream/web';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import { GitStorage, RefUpdateError } from '../src/index';

// Throwaway EC private key used only for signing test JWTs.
const key = `-----BEGIN PRIVATE KEY-----
MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwIBAQQgy3DPdzzsP6tOOvmo
rjbx6L7mpFmKKL2hNWNW3urkN8ehRANCAAQ7/DPhGH3kaWl0YEIO+W9WmhyCclDG
yTh6suablSura7ZDG8hpm3oNsq/ykC3Scfsw6ZTuuVuLlXKV/be/Xr0d
-----END PRIVATE KEY-----`;

// NOTE(review): generic argument stripped — presumably ReturnType<typeof vi.fn>.
type MockFetch = ReturnType;

// Installs (or reuses) a vi.fn() stub as the global fetch so each test can
// script HTTP responses.
function ensureMockFetch(): MockFetch {
  const existing = globalThis.fetch as unknown;
  if (existing && typeof existing === 'function' && 'mock' in (existing as any)) {
    return existing as MockFetch;
  }
  const mock = vi.fn();
  vi.stubGlobal('fetch', mock);
  return mock;
}

// Drains a fetch RequestInit body (string, bytes, ReadableStream, or async
// iterable) into a UTF-8 string for assertions.
async function readRequestBody(body: unknown): Promise {
  if (!body) {
    return '';
  }
  if (typeof body === 'string') {
    return body;
  }
  if (body instanceof Uint8Array) {
    return new TextDecoder().decode(body);
  }
  if (isReadableStream(body)) {
    const reader = body.getReader();
    const chunks: Uint8Array[] = [];
    while (true) {
      const { value, done } = await reader.read();
      if (done) {
        break;
      }
      if (value) {
        chunks.push(value);
      }
    }
    reader.releaseLock?.();
    return decodeChunks(chunks);
  }
  if (isAsyncIterable(body)) {
    const chunks: Uint8Array[] = [];
    // Normalize heterogeneous chunk types to Uint8Array before decoding.
    for await (const value of body as AsyncIterable) {
      if (value instanceof Uint8Array) {
        chunks.push(value);
      } else if (typeof value === 'string') {
        chunks.push(new TextEncoder().encode(value));
      } else if (value instanceof ArrayBuffer) {
        chunks.push(new Uint8Array(value));
      }
    }
    return decodeChunks(chunks);
  }
  return '';
}

// Duck-typed check: treats anything with a getReader() method as a stream.
function isReadableStream(value: unknown): value is ReadableStream {
  return (
    typeof value === 'object' &&
    value !== null &&
    typeof (value as ReadableStream).getReader === 'function'
  );
}

function isAsyncIterable(value: unknown): value is AsyncIterable {
  return (
    typeof value === 'object' &&
    value !== null &&
    Symbol.asyncIterator in (value as Record)
  );
}

// Concatenates byte chunks and decodes once; single-chunk fast path avoids
// the copy. NOTE(review): this helper trio is duplicated verbatim in
// commit.test.ts — consider extracting shared test utilities.
function decodeChunks(chunks: Uint8Array[]): string {
  if (chunks.length === 0) {
    return '';
  }
  if (chunks.length === 1) {
    return new TextDecoder().decode(chunks[0]);
  }
  const total = chunks.reduce((sum, chunk) => sum + chunk.byteLength, 0);
  const combined = new Uint8Array(total);
  let offset = 0;
  for (const chunk of chunks) {
    combined.set(chunk, offset);
    offset += chunk.byteLength;
  }
  return new TextDecoder().decode(combined);
}

describe('createCommitFromDiff', () => {
  const mockFetch = ensureMockFetch();
  // NOTE(review): generic argument stripped — presumably ReturnType<typeof vi.spyOn>.
  let randomSpy: ReturnType | undefined;

  beforeEach(() => {
    mockFetch.mockReset();
    // Default response for any call a test does not explicitly script.
    mockFetch.mockImplementation(() =>
      Promise.resolve({
        ok: true,
        status: 200,
        statusText: 'OK',
        json: async () => ({ repo_id: 'repo-id', url: 'https://example.git' }),
      }),
    );
    // Pin randomUUID so content_id values are deterministic in assertions.
    randomSpy = vi
      .spyOn(globalThis.crypto, 'randomUUID')
      .mockImplementation(() => 'cid-fixed' as any);
  });

  afterEach(() => {
    randomSpy?.mockRestore();
  });

  it('streams metadata and diff chunks in NDJSON order', async () => {
    const store = new GitStorage({ name: 'v0', key
}); + + const commitAck = { + commit: { + commit_sha: 'def456', + tree_sha: 'abc123', + target_branch: 'main', + pack_bytes: 84, + blob_count: 0, + }, + result: { + branch: 'main', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'def456', + success: true, + status: 'ok', + }, + }; + + // createRepo call + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'repo-main', url: 'https://repo.git' }), + }), + ); + + // diff commit call + mockFetch.mockImplementationOnce(async (url, init) => { + expect(String(url)).toMatch(/\/api\/v1\/repos\/diff-commit$/); + expect(init?.method).toBe('POST'); + const headers = init?.headers as Record; + expect(headers.Authorization).toMatch(/^Bearer\s.+/); + expect(headers['Content-Type']).toBe('application/x-ndjson'); + + const body = await readRequestBody(init?.body); + const lines = body.trim().split('\n'); + expect(lines).toHaveLength(2); + + const metadataFrame = JSON.parse(lines[0]); + expect(metadataFrame.metadata).toEqual({ + target_branch: 'main', + expected_head_sha: 'abc123', + commit_message: 'Apply patch', + author: { + name: 'Author Name', + email: 'author@example.com', + }, + }); + + const chunkFrame = JSON.parse(lines[1]); + expect(chunkFrame.diff_chunk.eof).toBe(true); + const decoded = Buffer.from(chunkFrame.diff_chunk.data, 'base64').toString('utf8'); + expect(decoded).toBe('diff --git a/file.txt b/file.txt\n'); + + return { + ok: true, + status: 200, + json: async () => commitAck, + }; + }); + + const repo = await store.createRepo({ id: 'repo-main' }); + const result = await repo.createCommitFromDiff({ + targetBranch: 'main', + commitMessage: 'Apply patch', + expectedHeadSha: 'abc123', + author: { name: 'Author Name', email: 'author@example.com' }, + diff: 'diff --git a/file.txt b/file.txt\n', + }); + + expect(result).toEqual({ + commitSha: 'def456', + treeSha: 'abc123', + targetBranch: 'main', + packBytes: 84, + 
blobCount: 0, + refUpdate: { + branch: 'main', + oldSha: '0000000000000000000000000000000000000000', + newSha: 'def456', + }, + }); + }); + + it('requires diff content before sending', async () => { + const store = new GitStorage({ name: 'v0', key }); + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'repo-main', url: 'https://repo.git' }), + }), + ); + + const repo = await store.createRepo({ id: 'repo-main' }); + await expect( + repo.createCommitFromDiff({ + targetBranch: 'main', + commitMessage: 'Apply patch', + author: { name: 'Author', email: 'author@example.com' }, + diff: undefined as unknown as string, + }), + ).rejects.toThrow('createCommitFromDiff diff is required'); + }); + + it('converts error responses into RefUpdateError', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'repo-main', url: 'https://repo.git' }), + }), + ); + + mockFetch.mockImplementationOnce(async () => { + const response = { + ok: false, + status: 409, + statusText: 'Conflict', + async json() { + return { + result: { + status: 'conflict', + message: 'Head moved', + branch: 'main', + old_sha: 'abc', + new_sha: 'def', + }, + }; + }, + async text() { + return ''; + }, + clone() { + return this; + }, + }; + return response; + }); + + const repo = await store.createRepo({ id: 'repo-main' }); + + const promise = repo.createCommitFromDiff({ + targetBranch: 'refs/heads/main', + commitMessage: 'Apply patch', + expectedHeadSha: 'abc', + author: { name: 'Author', email: 'author@example.com' }, + diff: 'diff --git a/file.txt b/file.txt\n', + }); + + await expect(promise).rejects.toThrow(RefUpdateError); + + await promise.catch((error) => { + if (!(error instanceof RefUpdateError)) { + throw error; + } + expect(error.status).toBe('conflict'); + 
expect(error.refUpdate).toEqual({ + branch: 'main', + oldSha: 'abc', + newSha: 'def', + }); + }); + }); + + it('includes Code-Storage-Agent header in diff commit requests', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ repo_id: 'repo-user-agent', url: 'https://repo.git' }), + }), + ); + + const commitAck = { + commit: { + commit_sha: 'useragent123', + tree_sha: 'tree456', + target_branch: 'main', + pack_bytes: 42, + blob_count: 0, + }, + result: { + branch: 'main', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'useragent123', + success: true, + status: 'ok', + }, + }; + + let capturedHeaders: Record | undefined; + mockFetch.mockImplementationOnce(async (_url, init) => { + capturedHeaders = init?.headers as Record; + return { + ok: true, + status: 200, + json: async () => commitAck, + }; + }); + + const repo = await store.createRepo({ id: 'repo-user-agent' }); + await repo.createCommitFromDiff({ + targetBranch: 'main', + commitMessage: 'Test user agent', + author: { name: 'Author Name', email: 'author@example.com' }, + diff: 'diff --git a/test.txt b/test.txt\n', + }); + + expect(capturedHeaders).toBeDefined(); + expect(capturedHeaders?.['Code-Storage-Agent']).toBeDefined(); + expect(capturedHeaders?.['Code-Storage-Agent']).toMatch(/code-storage-sdk\/\d+\.\d+\.\d+/); + }); +}); diff --git a/packages/git-storage-sdk-node/tests/commit.test.ts b/packages/git-storage-sdk-node/tests/commit.test.ts new file mode 100644 index 000000000..f2af5bdbe --- /dev/null +++ b/packages/git-storage-sdk-node/tests/commit.test.ts @@ -0,0 +1,836 @@ +import { ReadableStream } from 'node:stream/web'; +import { Blob } from 'buffer'; +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { GitStorage, RefUpdateError } from '../src/index'; + +const key = `-----BEGIN PRIVATE KEY----- 
+MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgy3DPdzzsP6tOOvmo +rjbx6L7mpFmKKL2hNWNW3urkN8ehRANCAAQ7/DPhGH3kaWl0YEIO+W9WmhyCclDG +yTh6suablSura7ZDG8hpm3oNsq/ykC3Scfsw6ZTuuVuLlXKV/be/Xr0d +-----END PRIVATE KEY-----`; + +const decodeJwtPayload = (jwt: string) => { + const parts = jwt.split('.'); + if (parts.length !== 3) { + throw new Error('Invalid JWT format'); + } + return JSON.parse(Buffer.from(parts[1], 'base64url').toString()); +}; + +const stripBearer = (value: string): string => value.replace(/^Bearer\s+/i, ''); + +const MAX_CHUNK_BYTES = 4 * 1024 * 1024; + +type MockFetch = ReturnType; + +function ensureMockFetch(): MockFetch { + const existing = globalThis.fetch as unknown; + if (existing && typeof existing === 'function' && 'mock' in (existing as any)) { + return existing as MockFetch; + } + const mock = vi.fn(); + vi.stubGlobal('fetch', mock); + return mock; +} + +async function readRequestBody(body: unknown): Promise { + if (!body) { + return ''; + } + if (typeof body === 'string') { + return body; + } + if (body instanceof Uint8Array) { + return new TextDecoder().decode(body); + } + if (isReadableStream(body)) { + const reader = body.getReader(); + const chunks: Uint8Array[] = []; + while (true) { + const { value, done } = await reader.read(); + if (done) { + break; + } + if (value) { + chunks.push(value); + } + } + reader.releaseLock?.(); + return decodeChunks(chunks); + } + if (isAsyncIterable(body)) { + const chunks: Uint8Array[] = []; + for await (const value of body as AsyncIterable) { + if (value instanceof Uint8Array) { + chunks.push(value); + } else if (typeof value === 'string') { + chunks.push(new TextEncoder().encode(value)); + } else if (value instanceof ArrayBuffer) { + chunks.push(new Uint8Array(value)); + } + } + return decodeChunks(chunks); + } + return ''; +} + +function isReadableStream(value: unknown): value is ReadableStream { + return ( + typeof value === 'object' && + value !== null && + typeof (value as 
ReadableStream).getReader === 'function' + ); +} + +function isAsyncIterable(value: unknown): value is AsyncIterable { + return ( + typeof value === 'object' && + value !== null && + Symbol.asyncIterator in (value as Record) + ); +} + +function decodeChunks(chunks: Uint8Array[]): string { + if (chunks.length === 0) { + return ''; + } + if (chunks.length === 1) { + return new TextDecoder().decode(chunks[0]); + } + const total = chunks.reduce((sum, chunk) => sum + chunk.byteLength, 0); + const combined = new Uint8Array(total); + let offset = 0; + for (const chunk of chunks) { + combined.set(chunk, offset); + offset += chunk.byteLength; + } + return new TextDecoder().decode(combined); +} + +describe('createCommit builder', () => { + const mockFetch = ensureMockFetch(); + let randomSpy: ReturnType | undefined; + + beforeEach(() => { + mockFetch.mockReset(); + mockFetch.mockImplementation(() => + Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'repo-id', url: 'https://example.git' }), + }), + ); + randomSpy = vi + .spyOn(globalThis.crypto, 'randomUUID') + .mockImplementation(() => 'cid-fixed' as any); + }); + + afterEach(() => { + randomSpy?.mockRestore(); + }); + + it('streams metadata and blob chunks in NDJSON order', async () => { + const store = new GitStorage({ name: 'v0', key }); + + const commitAck = { + commit: { + commit_sha: 'abc123', + tree_sha: 'def456', + target_branch: 'main', + pack_bytes: 42, + blob_count: 1, + }, + result: { + branch: 'main', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'abc123', + success: true, + status: 'ok', + }, + }; + + // createRepo call + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'repo-main', url: 'https://repo.git' }), + }), + ); + + // commit call + mockFetch.mockImplementationOnce(async (_url, init) => { + expect(init?.method).toBe('POST'); + const headers = 
init?.headers as Record; + expect(headers.Authorization).toMatch(/^Bearer\s.+/); + expect(headers['Content-Type']).toBe('application/x-ndjson'); + + const body = await readRequestBody(init?.body); + const lines = body.trim().split('\n'); + expect(lines).toHaveLength(2); + + const metadataFrame = JSON.parse(lines[0]); + expect(metadataFrame.metadata.commit_message).toBe('Update docs'); + expect(metadataFrame.metadata.author).toEqual({ + name: 'Author Name', + email: 'author@example.com', + }); + expect(metadataFrame.metadata.files).toEqual([ + expect.objectContaining({ + path: 'docs/readme.md', + operation: 'upsert', + content_id: 'cid-fixed', + }), + expect.objectContaining({ path: 'docs/old.txt', operation: 'delete' }), + ]); + expect(metadataFrame.metadata).not.toHaveProperty('ephemeral'); + expect(metadataFrame.metadata).not.toHaveProperty('ephemeral_base'); + + const chunkFrame = JSON.parse(lines[1]); + expect(chunkFrame.blob_chunk.content_id).toBe('cid-fixed'); + expect(chunkFrame.blob_chunk.eof).toBe(true); + const decoded = Buffer.from(chunkFrame.blob_chunk.data, 'base64').toString('utf8'); + expect(decoded).toBe('# v2.0.1\n- add streaming SDK\n'); + + return { + ok: true, + status: 200, + json: async () => commitAck, + }; + }); + + const repo = await store.createRepo({ id: 'repo-main' }); + const response = await repo + .createCommit({ + targetBranch: 'main', + commitMessage: 'Update docs', + author: { name: 'Author Name', email: 'author@example.com' }, + }) + .addFileFromString('docs/readme.md', '# v2.0.1\n- add streaming SDK\n') + .deletePath('docs/old.txt') + .send(); + + expect(response).toEqual({ + commitSha: 'abc123', + treeSha: 'def456', + targetBranch: 'main', + packBytes: 42, + blobCount: 1, + refUpdate: { + branch: 'main', + oldSha: '0000000000000000000000000000000000000000', + newSha: 'abc123', + }, + }); + expect(response.refUpdate.oldSha).toHaveLength(40); + }); + + it('includes base_branch metadata when provided', async () => { + const store = 
new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ repo_id: 'repo-base', url: 'https://repo.git' }), + }), + ); + + mockFetch.mockImplementationOnce(async (_url, init) => { + const body = await readRequestBody(init?.body); + const frames = body + .trim() + .split('\n') + .map((line) => JSON.parse(line)); + const metadata = frames[0].metadata; + expect(metadata.target_branch).toBe('feature/one'); + expect(metadata.expected_head_sha).toBe('abc123'); + expect(metadata.base_branch).toBe('main'); + return { + ok: true, + status: 200, + json: async () => ({ + commit: { + commit_sha: 'deadbeef', + tree_sha: 'cafebabe', + target_branch: 'feature/one', + pack_bytes: 1, + blob_count: 0, + }, + result: { + branch: 'feature/one', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'deadbeef', + success: true, + status: 'ok', + }, + }), + }; + }); + + const repo = await store.createRepo({ id: 'repo-base' }); + await repo + .createCommit({ + targetBranch: 'feature/one', + baseBranch: 'main', + expectedHeadSha: 'abc123', + commitMessage: 'branch off main', + author: { name: 'Author Name', email: 'author@example.com' }, + }) + .addFileFromString('docs/base.txt', 'hello') + .send(); + }); + + it('allows base_branch without expectedHeadSha', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ repo_id: 'repo-base-no-head', url: 'https://repo.git' }), + }), + ); + + mockFetch.mockImplementationOnce(async (_url, init) => { + const body = await readRequestBody(init?.body); + const metadata = JSON.parse(body.trim().split('\n')[0]).metadata; + expect(metadata.base_branch).toBe('main'); + expect(metadata).not.toHaveProperty('expected_head_sha'); + return { + ok: true, + status: 200, + json: async () => ({ + commit: { + commit_sha: 'abc123', + tree_sha: 
'def456', + target_branch: 'feature/one', + pack_bytes: 1, + blob_count: 1, + }, + result: { + branch: 'feature/one', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'abc123', + success: true, + status: 'ok', + }, + }), + }; + }); + + const repo = await store.createRepo({ id: 'repo-base-no-head' }); + await repo + .createCommit({ + targetBranch: 'feature/one', + baseBranch: 'main', + commitMessage: 'branch off', + author: { name: 'Author Name', email: 'author@example.com' }, + }) + .addFileFromString('docs/base.txt', 'hello') + .send(); + }); + + it('includes ephemeral flags when requested', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ repo_id: 'repo-ephemeral', url: 'https://repo.git' }), + }), + ); + + const commitAck = { + commit: { + commit_sha: 'eph123', + tree_sha: 'eph456', + target_branch: 'feature/demo', + pack_bytes: 1, + blob_count: 1, + }, + result: { + branch: 'feature/demo', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'eph123', + success: true, + status: 'ok', + }, + }; + + mockFetch.mockImplementationOnce(async (_url, init) => { + const body = await readRequestBody(init?.body); + const frames = body + .trim() + .split('\n') + .map((line) => JSON.parse(line)); + const metadata = frames[0].metadata; + expect(metadata.target_branch).toBe('feature/demo'); + expect(metadata.base_branch).toBe('feature/base'); + expect(metadata.ephemeral).toBe(true); + expect(metadata.ephemeral_base).toBe(true); + return { + ok: true, + status: 200, + json: async () => commitAck, + }; + }); + + const repo = await store.createRepo({ id: 'repo-ephemeral' }); + await repo + .createCommit({ + targetBranch: 'feature/demo', + baseBranch: 'feature/base', + ephemeral: true, + ephemeralBase: true, + commitMessage: 'ephemeral commit', + author: { name: 'Author Name', email: 'author@example.com' }, + }) + 
.addFileFromString('docs/ephemeral.txt', 'hello') + .send(); + }); + + it('accepts Blob and ReadableStream sources', async () => { + randomSpy?.mockRestore(); + const ids = ['blob-source', 'stream-source']; + randomSpy = vi + .spyOn(globalThis.crypto, 'randomUUID') + .mockImplementation(() => ids.shift() ?? 'overflow'); + + const store = new GitStorage({ name: 'v0', key }); + + const commitAck = { + commit: { + commit_sha: 'feedbeef', + tree_sha: 'c0ffee42', + target_branch: 'main', + pack_bytes: 128, + blob_count: 2, + }, + result: { + branch: 'main', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'feedbeef', + success: true, + status: 'ok', + }, + }; + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'repo-blobs', url: 'https://repo.git' }), + }), + ); + + mockFetch.mockImplementationOnce(async (_url, init) => { + const body = await readRequestBody(init?.body); + const frames = body + .trim() + .split('\n') + .map((line) => JSON.parse(line)); + const metadata = frames[0].metadata; + expect(metadata.files).toEqual([ + expect.objectContaining({ path: 'assets/blob.bin', content_id: 'blob-source' }), + expect.objectContaining({ path: 'assets/stream.bin', content_id: 'stream-source' }), + ]); + + const chunkFrames = frames.slice(1).map((frame) => frame.blob_chunk); + expect(chunkFrames).toHaveLength(2); + const decoded = Object.fromEntries( + chunkFrames.map((chunk) => [ + chunk.content_id, + Buffer.from(chunk.data, 'base64').toString('utf8'), + ]), + ); + expect(decoded['blob-source']).toBe('blob-payload'); + expect(decoded['stream-source']).toBe('streamed-payload'); + + return { + ok: true, + status: 200, + json: async () => commitAck, + }; + }); + + const repo = await store.createRepo({ id: 'repo-blobs' }); + const blob = new Blob(['blob-payload'], { type: 'text/plain' }); + const readable = new ReadableStream({ + start(controller) { + 
controller.enqueue(new TextEncoder().encode('streamed-payload')); + controller.close(); + }, + }); + + const result = await repo + .createCommit({ + targetBranch: 'main', + commitMessage: 'Add mixed sources', + author: { name: 'Author Name', email: 'author@example.com' }, + }) + .addFile('assets/blob.bin', blob) + .addFile('assets/stream.bin', readable) + .send(); + + expect(result.commitSha).toBe('feedbeef'); + expect(result.refUpdate.newSha).toBe('feedbeef'); + }); + + it('splits large payloads into <=4MiB chunks', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ repo_id: 'repo-chunk', url: 'https://repo.git' }), + }), + ); + + mockFetch.mockImplementationOnce(async (_url, init) => { + const body = await readRequestBody(init?.body); + const lines = body.trim().split('\n'); + expect(lines.length).toBe(3); + + const firstChunk = JSON.parse(lines[1]).blob_chunk; + const secondChunk = JSON.parse(lines[2]).blob_chunk; + + expect(Buffer.from(firstChunk.data, 'base64')).toHaveLength(MAX_CHUNK_BYTES); + expect(firstChunk.eof).toBe(false); + + expect(Buffer.from(secondChunk.data, 'base64')).toHaveLength(10); + expect(secondChunk.eof).toBe(true); + + return { + ok: true, + status: 200, + json: async () => ({ + commit: { + commit_sha: 'chunk123', + tree_sha: 'tree456', + target_branch: 'main', + pack_bytes: MAX_CHUNK_BYTES + 10, + blob_count: 1, + }, + result: { + branch: 'main', + old_sha: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', + new_sha: 'chunk123', + success: true, + status: 'ok', + }, + }), + }; + }); + + const repo = await store.createRepo({ id: 'repo-chunk' }); + const payload = new Uint8Array(MAX_CHUNK_BYTES + 10).fill(0x61); // 'a' + + const result = await repo + .createCommit({ + targetBranch: 'main', + commitMessage: 'Large commit', + author: { name: 'Author Name', email: 'author@example.com' }, + }) + .addFile('large.bin', 
payload) + .send(); + + expect(result.refUpdate.oldSha).toHaveLength(40); + expect(result.refUpdate.newSha).toBe('chunk123'); + }); + + it('throws when author is missing', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ repo_id: 'repo-missing-author', url: 'https://repo.git' }), + }), + ); + + const repo = await store.createRepo({ id: 'repo-missing-author' }); + expect(() => + repo.createCommit({ targetBranch: 'main', commitMessage: 'Missing author' }), + ).toThrow('createCommit author name and email are required'); + }); + + it('accepts legacy targetRef for backwards compatibility', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ repo_id: 'repo-legacy-target-ref', url: 'https://repo.git' }), + }), + ); + + const commitAck = { + commit: { + commit_sha: 'legacy123', + tree_sha: 'legacy456', + target_branch: 'main', + pack_bytes: 0, + blob_count: 0, + }, + result: { + branch: 'main', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'legacy123', + success: true, + status: 'ok', + }, + }; + + mockFetch.mockImplementationOnce(async (_url, init) => { + const body = await readRequestBody(init?.body); + const [metadataLine] = body.trim().split('\n'); + const payload = JSON.parse(metadataLine); + expect(payload.metadata.target_branch).toBe('main'); + return { + ok: true, + status: 200, + json: async () => commitAck, + }; + }); + + const repo = await store.createRepo({ id: 'repo-legacy-target-ref' }); + const response = await repo + .createCommit({ + targetRef: 'refs/heads/main', + commitMessage: 'Legacy path', + author: { name: 'Legacy Author', email: 'legacy@example.com' }, + }) + .send(); + + expect(response.targetBranch).toBe('main'); + expect(response.commitSha).toBe('legacy123'); + }); + + 
it('supports non-UTF encodings when Buffer is available', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ repo_id: 'repo-enc', url: 'https://repo.git' }), + }), + ); + + mockFetch.mockImplementationOnce(async (_url, init) => { + const body = await readRequestBody(init?.body); + const lines = body.trim().split('\n'); + expect(lines).toHaveLength(2); + const chunk = JSON.parse(lines[1]).blob_chunk; + const decoded = Buffer.from(chunk.data, 'base64').toString('latin1'); + expect(decoded).toBe('\u00a1Hola!'); + return { + ok: true, + status: 200, + json: async () => ({ + commit: { + commit_sha: 'enc123', + tree_sha: 'treeenc', + target_branch: 'main', + pack_bytes: 12, + blob_count: 1, + }, + result: { + branch: 'main', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'enc123', + success: true, + status: 'ok', + }, + }), + }; + }); + + const repo = await store.createRepo({ id: 'repo-enc' }); + await repo + .createCommit({ + targetBranch: 'main', + commitMessage: 'Add latin1 greeting', + author: { name: 'Author Name', email: 'author@example.com' }, + }) + .addFileFromString('docs/hola.txt', '\u00a1Hola!', { encoding: 'latin1' }) + .send(); + }); + + it('honors deprecated ttl option when sending commits', async () => { + const store = new GitStorage({ name: 'v0', key }); + const legacyTTL = 4321; + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ repo_id: 'repo-legacy-ttl', url: 'https://repo.git' }), + }), + ); + + const commitAck = { + commit: { + commit_sha: 'legacy123', + tree_sha: 'treetree', + target_branch: 'main', + pack_bytes: 16, + blob_count: 1, + }, + result: { + branch: 'main', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'legacy123', + success: true, + status: 'ok', + }, + }; + + let authHeader: string | undefined; + 
+      authHeader = (init?.headers as Record<string, string> | undefined)?.Authorization;
+    let capturedHeaders: Record<string, string> | undefined;
+    mockFetch.mockImplementationOnce(async (_url, init) => {
+      capturedHeaders = init?.headers as Record<string, string>;
+ *   node packages/git-storage-sdk-node/tests/full-workflow.js
+ */ + +import { createHash, createPrivateKey } from 'node:crypto'; +import { existsSync } from 'node:fs'; +import { readFile } from 'node:fs/promises'; +import path from 'node:path'; +import process from 'node:process'; +import { setTimeout as delay } from 'node:timers/promises'; +import { fileURLToPath, pathToFileURL } from 'node:url'; +import { importPKCS8, SignJWT } from 'jose'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const defaultKeyPath = path.resolve( + __dirname, + '../../../git3p-backend/hack/test-scripts/dev-keys/private.pem', +); + +function gitBlobSha(contents) { + const buffer = Buffer.from(contents, 'utf8'); + const header = `blob ${buffer.byteLength}\0`; + return createHash('sha1').update(header).update(buffer).digest('hex'); +} + +function normalizeEnvironment(value) { + if (!value) { + return 'local'; + } + const normalized = value.toLowerCase(); + if (normalized === 'local') { + return 'local'; + } + if (normalized === 'stage' || normalized === 'staging') { + return 'staging'; + } + if (normalized === 'prod' || normalized === 'production') { + return 'production'; + } + throw new Error( + `Unsupported environment "${value}". Expected one of: local, staging, prod, production.`, + ); +} + +function applyOrgPlaceholder(value, org) { + if (!value) { + return value; + } + return value.includes('{{org}}') ? 
value.replace('{{org}}', org) : value; +} + +function printUsage() { + const scriptName = path.relative(process.cwd(), __filename); + console.log( + [ + `Usage: node ${scriptName} [options]`, + '', + 'Options:', + ' -e, --environment ENV Target environment (local|staging|prod|production)', + ' -k, --key PATH Path to signing key PEM', + ' -s, --subdomain NAME Customer subdomain for non-local environments', + ' -n, --namespace NAME Explicit namespace override', + ' -r, --repo NAME Repository identifier override', + ' --api-base-url URL Override API base URL', + ' --storage-base-url HOST Override storage host (hostname[:port])', + ' --timeout MS Override timeout in milliseconds', + ' --key-id ID Override JWT key identifier', + ' -h, --help Show this help text and exit', + '', + 'Environment variables can also be used (see script header for details).', + ].join('\n'), + ); +} + +function parseArgs(argv) { + const options = {}; + for (let index = 0; index < argv.length; index += 1) { + const arg = argv[index]; + const readValue = () => { + const next = argv[index + 1]; + if (!next) { + throw new Error(`Missing value for option "${arg}"`); + } + index += 1; + return next; + }; + + switch (arg) { + case '-e': + case '--environment': + options.environment = readValue(); + break; + case '-k': + case '--key': + options.keyPath = readValue(); + break; + case '-s': + case '--subdomain': + options.subdomain = readValue(); + break; + case '-n': + case '--namespace': + options.namespace = readValue(); + break; + case '-r': + case '--repo': + options.repoId = readValue(); + break; + case '--api-base-url': + options.apiBaseUrl = readValue(); + break; + case '--storage-base-url': + options.storageBaseUrl = readValue(); + break; + case '--timeout': + options.timeout = readValue(); + break; + case '--key-id': + options.keyId = readValue(); + break; + case '-h': + case '--help': + options.help = true; + break; + default: + if (arg.startsWith('-')) { + throw new Error(`Unknown option 
"${arg}"`); + } + throw new Error(`Unexpected argument "${arg}"`); + } + } + return options; +} + +let cliOptions; +try { + cliOptions = parseArgs(process.argv.slice(2)); +} catch (error) { + console.error(error instanceof Error ? error.message : 'Failed to parse command line arguments.'); + printUsage(); + process.exit(1); +} + +if (cliOptions?.help) { + printUsage(); + process.exit(0); +} + +const env = { + runEnv: normalizeEnvironment(process.env.GIT_STORAGE_ENV ?? 'local'), + apiBaseUrl: process.env.GIT_STORAGE_API_BASE_URL, + storageBaseUrl: process.env.GIT_STORAGE_STORAGE_BASE_URL, + namespace: process.env.GIT_STORAGE_NAME, + subdomain: process.env.GIT_STORAGE_SUBDOMAIN, + defaultBranch: process.env.GIT_STORAGE_DEFAULT_BRANCH ?? 'main', + repoId: process.env.GIT_STORAGE_REPO_ID, + keyPath: process.env.GIT_STORAGE_KEY_PATH ?? defaultKeyPath, + keyId: process.env.GIT_STORAGE_KEY_ID ?? 'dev-key-001', + timeoutMs: process.env.GIT_STORAGE_TIMEOUT + ? Number.parseInt(process.env.GIT_STORAGE_TIMEOUT, 10) + : undefined, +}; + +if (cliOptions?.environment) { + env.runEnv = normalizeEnvironment(cliOptions.environment); +} +if (cliOptions?.apiBaseUrl) { + env.apiBaseUrl = cliOptions.apiBaseUrl; +} +if (cliOptions?.storageBaseUrl) { + env.storageBaseUrl = cliOptions.storageBaseUrl; +} +if (cliOptions?.namespace) { + env.namespace = cliOptions.namespace; +} +if (cliOptions?.subdomain) { + env.subdomain = cliOptions.subdomain; +} +if (cliOptions?.repoId) { + env.repoId = cliOptions.repoId; +} +if (cliOptions?.keyPath) { + env.keyPath = path.resolve(cliOptions.keyPath); +} +if (cliOptions?.keyId) { + env.keyId = cliOptions.keyId; +} +if (cliOptions?.timeout) { + const parsedTimeout = Number.parseInt(cliOptions.timeout, 10); + if (Number.isNaN(parsedTimeout) || parsedTimeout <= 0) { + console.error( + `Invalid timeout value "${cliOptions.timeout}". 
+            bodyPreview = `<unable to read body: ${reason}>`;
+        'or provide a compiled dist at packages/git-storage-sdk-node/dist/index.js.',
Set GIT_STORAGE_KEY_PATH to override.`, + ); + } + + const key = await readFile(env.keyPath, 'utf8'); + + const namespace = + env.namespace ?? + (env.runEnv === 'local' + ? 'local' + : (env.subdomain ?? + (() => { + throw new Error( + 'Set GIT_STORAGE_NAME or GIT_STORAGE_SUBDOMAIN when targeting non-local environments.', + ); + })())); + + const namespaceSlug = namespace.toLowerCase(); + + const apiBaseUrl = applyOrgPlaceholder( + env.apiBaseUrl ?? + (env.runEnv === 'local' + ? 'http://127.0.0.1:8081' + : env.runEnv === 'staging' + ? 'https://api.{{org}}.3p.pierre.rip' + : 'https://api.{{org}}.code.storage'), + namespaceSlug, + ); + + const storageBaseUrl = applyOrgPlaceholder( + env.storageBaseUrl ?? + (env.runEnv === 'local' + ? '127.0.0.1:8080' + : env.runEnv === 'staging' + ? '{{org}}.3p.pierre.rip' + : '{{org}}.code.storage'), + namespaceSlug, + ); + + const repoId = env.repoId ?? `sdk-full-workflow-${Date.now()}`; + const defaultBranch = env.defaultBranch; + const timeout = env.timeoutMs ?? 
180_000; + const grepToken = `SDK_GREP_${repoId}`; + + console.log(`▶ GitStorage full workflow`); + console.log(` Environment: ${env.runEnv}`); + console.log(` Namespace: ${namespace}`); + console.log(` API base: ${apiBaseUrl}`); + console.log(` Storage host:${storageBaseUrl}`); + console.log(` Repo ID: ${repoId}`); + console.log(` Timeout: ${timeout / 1000}s`); + + const GitStorage = await loadGitStorage(); + const store = new GitStorage({ + name: namespace, + key, + apiBaseUrl, + storageBaseUrl, + }); + + const repo = await store.createRepo({ id: repoId, defaultBranch }); + console.log(`✓ Repository created (${repo.id})`); + + const signature = () => ({ + name: 'SDK Committer', + email: 'sdk@example.com', + }); + + const initialSig = signature(); + const initialCommit = await repo + .createCommit({ + targetBranch: defaultBranch, + commitMessage: 'Initial commit: Add README via SDK', + author: initialSig, + committer: initialSig, + }) + .addFileFromString( + 'README.md', + [ + `# ${repoId}`, + '', + 'This repository is created by the GitStorage SDK full workflow script.', + '', + `Grep marker: ${grepToken}`, + 'Case marker: ONLYUPPERCASE', + ].join('\n'), + { encoding: 'utf-8' }, + ) + .send(); + const baselineCommitSha = initialCommit.commitSha; + let latestCommitSha = baselineCommitSha; + console.log(`✓ Initial commit pushed (${latestCommitSha})`); + + await waitFor( + async () => { + const branches = await repo.listBranches({ limit: 10 }); + return branches.branches.find( + (branch) => branch.name === defaultBranch && branch.headSha === latestCommitSha, + ); + }, + { timeout, description: `default branch ${defaultBranch} to include initial commit` }, + ); + console.log(`✓ Default branch updated (${defaultBranch})`); + + await waitFor( + async () => { + const commits = await repo.listCommits({ branch: defaultBranch, limit: 5 }); + return commits.commits.find((commit) => commit.sha === latestCommitSha); + }, + { timeout, description: 'initial commit to appear in 
+        'The full-workflow script loads the SDK from packages/git-storage-sdk-node/dist, which is likely stale.',
result : null; + }, + { + timeout, + description: 'grep API to return results for case-insensitive match', + isFatalError: (error) => + error instanceof TypeError || + (error?.name === 'ApiError' && + typeof error.status === 'number' && + error.status >= 400 && + error.status < 500 && + error.status !== 429), + onRetry: ({ attempt, error }) => { + if (attempt % 5 !== 0) { + return; + } + if (error) { + console.log(`… waiting for grep results (attempt ${attempt}): ${error.message ?? error}`); + } else { + console.log(`… waiting for grep results (attempt ${attempt})`); + } + }, + }, + ); + console.log( + `✓ Grep API returns case-insensitive matches (${grepCaseInsensitive.matches.length} file(s))`, + ); + + const grepFiltered = await waitFor( + async () => { + const result = await repo.grep({ + ref: defaultBranch, + query: { pattern: grepToken, caseSensitive: true }, + fileFilters: { includeGlobs: ['README.md'] }, + limits: { maxLines: 10, maxMatchesPerFile: 3 }, + }); + + const hasToken = result.matches.some((match) => + match.lines.some((line) => line.type === 'match' && line.text.includes(grepToken)), + ); + return hasToken ? result : null; + }, + { + timeout, + description: 'grep API to return matches filtered to README.md', + isFatalError: (error) => + error instanceof TypeError || + (error?.name === 'ApiError' && + typeof error.status === 'number' && + error.status >= 400 && + error.status < 500 && + error.status !== 429), + onRetry: ({ attempt, error }) => { + if (attempt % 5 !== 0) { + return; + } + if (error) { + console.log( + `… waiting for grep file-filtered match (attempt ${attempt}): ${error.message ?? 
error}`, + ); + } else { + console.log(`… waiting for grep file-filtered match (attempt ${attempt})`); + } + }, + }, + ); + console.log(`✓ Grep API respects file filters (${grepFiltered.matches.length} file(s))`); + + const packSig = signature(); + const addMessage = 'Add file via commit-pack API (SDK)'; + const addCommit = await repo + .createCommit({ + targetBranch: defaultBranch, + expectedHeadSha: latestCommitSha, + commitMessage: addMessage, + author: packSig, + committer: packSig, + }) + .addFileFromString( + 'api-generated.txt', + [ + 'File generated via GitStorage SDK full workflow script.', + `Repository: ${repoId}`, + `Commit message: ${addMessage}`, + ].join('\n'), + { encoding: 'utf-8' }, + ) + .send(); + latestCommitSha = addCommit.commitSha; + console.log(`✓ Commit-pack add executed (${latestCommitSha})`); + + await waitFor( + async () => { + const commits = await repo.listCommits({ branch: defaultBranch, limit: 5 }); + const match = commits.commits.find((commit) => commit.sha === latestCommitSha); + if (match) { + if (match.message !== addMessage) { + throw new Error(`Unexpected commit message: ${match.message}`); + } + return match; + } + return null; + }, + { timeout, description: 'commit-pack add to appear in commit list' }, + ); + console.log(`✓ Commit listing includes commit-pack add`); + + await waitFor( + async () => { + const branches = await repo.listBranches({ limit: 10 }); + return branches.branches.find( + (branch) => branch.name === defaultBranch && branch.headSha === latestCommitSha, + ); + }, + { timeout, description: `default branch ${defaultBranch} to advance to commit-pack add` }, + ); + console.log(`✓ Default branch advanced to commit-pack add`); + + await waitFor( + async () => { + const files = await repo.listFiles({ ref: defaultBranch }); + return files.paths.includes('api-generated.txt') ? 
files : null; + }, + { timeout, description: 'api-generated.txt to appear in listFiles response' }, + ); + console.log(`✓ api-generated.txt present via listFiles`); + + const updateSig = signature(); + const updateMessage = 'Update document via commit-pack API (SDK)'; + const updateCommit = await repo + .createCommit({ + targetBranch: defaultBranch, + expectedHeadSha: latestCommitSha, + commitMessage: updateMessage, + author: updateSig, + committer: updateSig, + }) + .addFileFromString( + 'api-generated.txt', + [ + 'File generated via GitStorage SDK full workflow script.', + `Repository: ${repoId}`, + 'Updated content: CommitPack run verified document update via SDK.', + ].join('\n'), + { encoding: 'utf-8' }, + ) + .send(); + latestCommitSha = updateCommit.commitSha; + console.log(`✓ Commit-pack update executed (${latestCommitSha})`); + + const updateInfo = await waitFor( + async () => { + const commits = await repo.listCommits({ branch: defaultBranch, limit: 5 }); + return commits.commits.find((commit) => commit.sha === latestCommitSha); + }, + { timeout, description: 'commit-pack update to appear in commit list' }, + ); + if (updateInfo.message !== updateMessage) { + throw new Error(`Unexpected commit message for update: ${updateInfo.message}`); + } + console.log(`✓ Commit listing includes commit-pack update`); + + await waitFor( + async () => { + const branches = await repo.listBranches({ limit: 10 }); + return branches.branches.find( + (branch) => branch.name === defaultBranch && branch.headSha === latestCommitSha, + ); + }, + { timeout, description: `default branch ${defaultBranch} to advance to commit-pack update` }, + ); + console.log(`✓ Default branch advanced to commit-pack update`); + + const diff = await waitFor( + async () => { + const response = await repo.getCommitDiff({ sha: latestCommitSha }); + return response.files.some((file) => file.path === 'api-generated.txt') ? 
response : null; + }, + { timeout, description: 'commit diff for commit-pack update' }, + ); + console.log(`✓ Commit diff verified (${diff.files.length} file(s))`); + + await waitFor( + async () => { + const response = await repo.getFileStream({ path: 'api-generated.txt', ref: defaultBranch }); + const body = await response.text(); + return body.includes('Updated content') ? body : null; + }, + { timeout, description: 'api-generated.txt to return updated content' }, + ); + console.log(`✓ api-generated.txt contains updated content`); + + const diffSig = signature(); + const diffMessage = 'Apply diff via createCommitFromDiff (SDK)'; + const diffFileName = 'diff-endpoint.txt'; + const diffLines = [ + 'Diff commit created by GitStorage SDK full workflow script.', + `Repository: ${repoId}`, + `Timestamp: ${new Date().toISOString()}`, + ]; + const diffBody = `${diffLines.join('\n')}\n`; + const diffBlobSha = gitBlobSha(diffBody); + const diffHunkHeader = `@@ -0,0 +1,${diffLines.length} @@`; + const diffPatchLines = [ + `diff --git a/${diffFileName} b/${diffFileName}`, + 'new file mode 100644', + `index 0000000000000000000000000000000000000000..${diffBlobSha}`, + '--- /dev/null', + `+++ b/${diffFileName}`, + diffHunkHeader, + ...diffLines.map((line) => `+${line}`), + ]; + const diffPatch = `${diffPatchLines.join('\n')}\n`; + + const diffCommit = await repo.createCommitFromDiff({ + targetBranch: defaultBranch, + expectedHeadSha: latestCommitSha, + commitMessage: diffMessage, + author: diffSig, + committer: diffSig, + diff: diffPatch, + }); + latestCommitSha = diffCommit.commitSha; + console.log(`✓ createCommitFromDiff commit executed (${latestCommitSha})`); + + const diffCommitInfo = await waitFor( + async () => { + const commits = await repo.listCommits({ branch: defaultBranch, limit: 5 }); + return commits.commits.find((commit) => commit.sha === latestCommitSha); + }, + { timeout, description: 'diff commit to appear in commit list' }, + ); + if (diffCommitInfo.message 
!== diffMessage) { + throw new Error(`Unexpected diff commit message: ${diffCommitInfo.message}`); + } + console.log('✓ Commit listing includes createCommitFromDiff commit'); + + await waitFor( + async () => { + const files = await repo.listFiles({ ref: defaultBranch }); + return files.paths.includes(diffFileName) ? files : null; + }, + { timeout, description: `${diffFileName} to appear in listFiles response` }, + ); + console.log(`✓ ${diffFileName} present via listFiles`); + + await waitFor( + async () => { + const response = await repo.getFileStream({ path: diffFileName, ref: defaultBranch }); + const body = await response.text(); + return body.includes('Diff commit created') ? body : null; + }, + { timeout, description: `${diffFileName} to contain diff-applied content` }, + ); + console.log(`✓ ${diffFileName} contains diff-applied content`); + + const featureBranch = `sdk-feature-${Date.now()}`; + const featureSig = signature(); + const featureMessage = 'Create feature branch via commit-pack API (SDK)'; + const featureOptions = { + targetBranch: featureBranch, + baseBranch: defaultBranch, + expectedHeadSha: latestCommitSha, + commitMessage: featureMessage, + author: featureSig, + committer: featureSig, + }; + console.log('[full-workflow] feature commit options', featureOptions); + const featureCommit = await repo + .createCommit(featureOptions) + .addFileFromString( + 'feature.txt', + [ + 'Feature branch file generated via GitStorage SDK full workflow script.', + `Repository: ${repoId}`, + `Branch: ${featureBranch}`, + ].join('\n'), + ) + .send(); + console.log(`✓ Feature branch commit created (${featureCommit.commitSha})`); + + await waitFor( + async () => { + const branches = await repo.listBranches({ limit: 25 }); + return branches.branches.find( + (branch) => branch.name === featureBranch && branch.headSha === featureCommit.commitSha, + ); + }, + { timeout, description: `feature branch ${featureBranch} to appear in branch list` }, + ); + console.log(`✓ 
Feature branch ${featureBranch} reported by listBranches`); + + await waitFor( + async () => { + const files = await repo.listFiles({ ref: featureBranch }); + return files.paths.includes('feature.txt') ? files : null; + }, + { timeout, description: 'feature branch file to be accessible' }, + ); + console.log(`✓ feature.txt accessible on ${featureBranch}`); + + const restoreSig = signature(); + const restoreMessage = `Restore to pre-deploy baseline`; + const restoreResult = await repo.restoreCommit({ + targetBranch: defaultBranch, + expectedHeadSha: latestCommitSha, + targetCommitSha: baselineCommitSha, + commitMessage: restoreMessage, + author: restoreSig, + committer: restoreSig, + }); + latestCommitSha = restoreResult.commitSha; + console.log(`✓ Restore commit executed (${latestCommitSha})`); + + const restoreInfo = await waitFor( + async () => { + const commits = await repo.listCommits({ branch: defaultBranch, limit: 5 }); + return commits.commits.find((commit) => commit.sha === latestCommitSha); + }, + { timeout, description: 'restore commit to appear in commit list' }, + ); + if (restoreInfo.message !== restoreMessage) { + throw new Error(`Unexpected restore commit message: ${restoreInfo.message}`); + } + + await waitFor( + async () => { + const branches = await repo.listBranches({ limit: 10 }); + return branches.branches.find( + (branch) => branch.name === defaultBranch && branch.headSha === latestCommitSha, + ); + }, + { timeout, description: `default branch ${defaultBranch} to advance to restore commit` }, + ); + console.log(`✓ Default branch advanced to restore commit`); + + await waitFor( + async () => { + const files = await repo.listFiles({ ref: defaultBranch }); + return files.paths.some((path) => path === 'api-generated.txt' || path === diffFileName) + ? 
null + : files; + }, + { + timeout, + description: 'api-generated.txt and diff-endpoint.txt removed after restore commit', + }, + ); + console.log(`✓ api-generated.txt and ${diffFileName} removed by restore commit`); + + const readmeBody = await repo + .getFileStream({ path: 'README.md', ref: defaultBranch }) + .then((resp) => resp.text()); + if (!readmeBody.includes(repoId)) { + throw new Error('README does not contain repository identifier'); + } + console.log(`✓ README accessible via getFileStream`); + + console.log('\n✅ GitStorage SDK full workflow completed successfully.'); + console.log(` Repository: ${repoId}`); + console.log(` Default branch: ${defaultBranch}`); +} + +function patchGitStorage(GitStorage) { + if (GitStorage.prototype.__fullWorkflowPatched) { + return; + } + + GitStorage.prototype.__fullWorkflowPatched = true; + + GitStorage.prototype.generateJWT = async function patchedGenerateJWT(repoId, options) { + const permissions = options?.permissions || ['git:write', 'git:read']; + const ttl = options?.ttl || 365 * 24 * 60 * 60; + const now = Math.floor(Date.now() / 1_000); + const payload = { + iss: this.options.name, + sub: '@pierre/storage', + repo: repoId, + scopes: permissions, + iat: now, + exp: now + ttl, + }; + + const { key, alg } = await resolveSigningKey(this.options.key); + const header = { alg, typ: 'JWT' }; + if (env.keyId) { + header.kid = env.keyId; + } + + return new SignJWT(payload).setProtectedHeader(header).sign(key); + }; +} + +async function resolveSigningKey(pem) { + if (KEY_CACHE.has(pem)) { + return KEY_CACHE.get(pem); + } + + let selectedAlgorithm; + + try { + const keyObject = createPrivateKey({ key: pem, format: 'pem' }); + const type = keyObject.asymmetricKeyType; + if (type === 'rsa') { + selectedAlgorithm = 'RS256'; + } else if (type === 'rsa-pss') { + selectedAlgorithm = 'PS256'; + } else if (type === 'ec') { + const curve = keyObject.asymmetricKeyDetails?.namedCurve?.toLowerCase(); + switch (curve) { + case 
'prime256v1': + case 'secp256r1': + case 'p-256': + selectedAlgorithm = 'ES256'; + break; + case 'secp384r1': + case 'p-384': + selectedAlgorithm = 'ES384'; + break; + case 'secp521r1': + case 'p-521': + selectedAlgorithm = 'ES512'; + break; + default: + break; + } + } else if (type === 'ed25519' || type === 'ed448') { + selectedAlgorithm = 'EdDSA'; + } + // fallthrough to general detection if selectedAlgorithm remains undefined + } catch { + // Ignore inspection errors, fall back to brute-force detection below. + } + + if (selectedAlgorithm) { + try { + const key = await importPKCS8(pem, selectedAlgorithm); + const entry = { key, alg: selectedAlgorithm }; + KEY_CACHE.set(pem, entry); + return entry; + } catch { + // If the direct attempt fails, continue with the fallback list below. + } + } + + const algorithms = [ + 'RS256', + 'RS384', + 'RS512', + 'PS256', + 'PS384', + 'PS512', + 'ES256', + 'ES384', + 'ES512', + 'EdDSA', + ]; + let lastError; + + for (const alg of algorithms) { + try { + const key = await importPKCS8(pem, alg); + const entry = { key, alg }; + KEY_CACHE.set(pem, entry); + return entry; + } catch (error) { + lastError = error; + } + } + + throw new Error('Unsupported key type for JWT signing', { cause: lastError }); +} + +main().catch((error) => { + console.error('❌ Workflow failed.'); + console.error(error instanceof Error ? (error.stack ?? 
error.message) : error); + process.exitCode = 1; +}); diff --git a/packages/git-storage-sdk-node/tests/index.test.ts b/packages/git-storage-sdk-node/tests/index.test.ts new file mode 100644 index 000000000..6fb7b52de --- /dev/null +++ b/packages/git-storage-sdk-node/tests/index.test.ts @@ -0,0 +1,1680 @@ +import { importPKCS8, jwtVerify } from 'jose'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { CodeStorage, createClient, GitStorage } from '../src/index'; + +// Mock fetch globally if it is not already stubbed +const existingFetch = globalThis.fetch as unknown; +const mockFetch = + existingFetch && typeof existingFetch === 'function' && 'mock' in (existingFetch as any) + ? (existingFetch as ReturnType) + : vi.fn(); + +if (!(existingFetch && typeof existingFetch === 'function' && 'mock' in (existingFetch as any))) { + vi.stubGlobal('fetch', mockFetch); +} + +const key = `-----BEGIN PRIVATE KEY----- +MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgy3DPdzzsP6tOOvmo +rjbx6L7mpFmKKL2hNWNW3urkN8ehRANCAAQ7/DPhGH3kaWl0YEIO+W9WmhyCclDG +yTh6suablSura7ZDG8hpm3oNsq/ykC3Scfsw6ZTuuVuLlXKV/be/Xr0d +-----END PRIVATE KEY-----`; + +const decodeJwtPayload = (jwt: string) => { + const parts = jwt.split('.'); + if (parts.length !== 3) { + throw new Error('Invalid JWT format'); + } + return JSON.parse(Buffer.from(parts[1], 'base64url').toString()); +}; + +const stripBearer = (value: string): string => value.replace(/^Bearer\s+/i, ''); + +describe('GitStorage', () => { + beforeEach(() => { + // Reset mock before each test + mockFetch.mockReset(); + // Default successful response for createRepo + mockFetch.mockResolvedValue({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'test-repo-id', url: 'https://test.code.storage/repo.git' }), + }); + }); + describe('constructor', () => { + it('should create an instance with required options', () => { + const store = new GitStorage({ name: 'v0', key }); + 
expect(store).toBeInstanceOf(GitStorage); + }); + + it('should store the provided key', () => { + const store = new GitStorage({ name: 'v0', key }); + const config = store.getConfig(); + expect(config.key).toBe(key); + }); + + it('should throw error when key is missing', () => { + expect(() => { + // @ts-expect-error - Testing missing key + new GitStorage({}); + }).toThrow( + 'GitStorage requires a name and key. Please check your configuration and try again.', + ); + }); + + it('should throw error when name or key is null or undefined', () => { + expect(() => { + // @ts-expect-error - Testing null key + new GitStorage({ name: 'v0', key: null }); + }).toThrow( + 'GitStorage requires a name and key. Please check your configuration and try again.', + ); + + expect(() => { + // @ts-expect-error - Testing undefined key + new GitStorage({ name: 'v0', key: undefined }); + }).toThrow( + 'GitStorage requires a name and key. Please check your configuration and try again.', + ); + + expect(() => { + // @ts-expect-error - Testing null name + new GitStorage({ name: null, key: 'test-key' }); + }).toThrow( + 'GitStorage requires a name and key. Please check your configuration and try again.', + ); + + expect(() => { + // @ts-expect-error - Testing undefined name + new GitStorage({ name: undefined, key: 'test-key' }); + }).toThrow( + 'GitStorage requires a name and key. 
Please check your configuration and try again.', + ); + }); + + it('should throw error when key is empty string', () => { + expect(() => { + new GitStorage({ name: 'v0', key: '' }); + }).toThrow('GitStorage key must be a non-empty string.'); + }); + + it('should throw error when name is empty string', () => { + expect(() => { + new GitStorage({ name: '', key: 'test-key' }); + }).toThrow('GitStorage name must be a non-empty string.'); + }); + + it('should throw error when key is only whitespace', () => { + expect(() => { + new GitStorage({ name: 'v0', key: ' ' }); + }).toThrow('GitStorage key must be a non-empty string.'); + }); + + it('should throw error when name is only whitespace', () => { + expect(() => { + new GitStorage({ name: ' ', key: 'test-key' }); + }).toThrow('GitStorage name must be a non-empty string.'); + }); + + it('should throw error when key is not a string', () => { + expect(() => { + // @ts-expect-error - Testing non-string key + new GitStorage({ name: 'v0', key: 123 }); + }).toThrow('GitStorage key must be a non-empty string.'); + + expect(() => { + // @ts-expect-error - Testing non-string key + new GitStorage({ name: 'v0', key: {} }); + }).toThrow('GitStorage key must be a non-empty string.'); + }); + + it('should throw error when name is not a string', () => { + expect(() => { + // @ts-expect-error - Testing non-string name + new GitStorage({ name: 123, key: 'test-key' }); + }).toThrow('GitStorage name must be a non-empty string.'); + + expect(() => { + // @ts-expect-error - Testing non-string name + new GitStorage({ name: {}, key: 'test-key' }); + }).toThrow('GitStorage name must be a non-empty string.'); + }); + }); + + it('parses commit dates into Date instances', async () => { + const store = new GitStorage({ name: 'v0', key }); + + const repo = await store.createRepo({ id: 'repo-dates' }); + + const rawCommits = { + commits: [ + { + sha: 'abc123', + message: 'feat: add endpoint', + author_name: 'Jane Doe', + author_email: 
'jane@example.com', + committer_name: 'Jane Doe', + committer_email: 'jane@example.com', + date: '2024-01-15T14:32:18Z', + }, + ], + next_cursor: undefined, + has_more: false, + }; + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => rawCommits, + }), + ); + + const commits = await repo.listCommits(); + expect(commits.commits[0].rawDate).toBe('2024-01-15T14:32:18Z'); + expect(commits.commits[0].date).toBeInstanceOf(Date); + expect(commits.commits[0].date.toISOString()).toBe('2024-01-15T14:32:18.000Z'); + }); + + it('fetches git notes with getNote', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'repo-notes-read' }); + + mockFetch.mockImplementationOnce((url, init) => { + expect(init?.method).toBe('GET'); + const requestUrl = new URL(url as string); + expect(requestUrl.pathname.endsWith('/repos/notes')).toBe(true); + expect(requestUrl.searchParams.get('sha')).toBe('abc123'); + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + sha: 'abc123', + note: 'hello notes', + ref_sha: 'def456', + }), + } as any); + }); + + const result = await repo.getNote({ sha: 'abc123' }); + expect(result).toEqual({ sha: 'abc123', note: 'hello notes', refSha: 'def456' }); + }); + + it('sends note payloads with createNote, appendNote, and deleteNote', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'repo-notes-write' }); + + mockFetch.mockImplementationOnce((url, init) => { + expect(init?.method).toBe('POST'); + const requestUrl = new URL(url as string); + expect(requestUrl.pathname.endsWith('/repos/notes')).toBe(true); + expect(init?.body).toBeDefined(); + const payload = JSON.parse(init?.body as string); + expect(payload).toEqual({ + sha: 'abc123', + action: 'add', + note: 'note content', + }); + return Promise.resolve({ + ok: true, + status: 201, + statusText: 
'Created', + headers: { get: () => 'application/json' } as any, + json: async () => ({ + sha: 'abc123', + target_ref: 'refs/notes/commits', + new_ref_sha: 'def456', + result: { success: true, status: 'ok' }, + }), + } as any); + }); + + const createResult = await repo.createNote({ sha: 'abc123', note: 'note content' }); + expect(createResult.targetRef).toBe('refs/notes/commits'); + + mockFetch.mockImplementationOnce((url, init) => { + expect(init?.method).toBe('POST'); + const requestUrl = new URL(url as string); + expect(requestUrl.pathname.endsWith('/repos/notes')).toBe(true); + expect(init?.body).toBeDefined(); + const payload = JSON.parse(init?.body as string); + expect(payload).toEqual({ + sha: 'abc123', + action: 'append', + note: 'note append', + }); + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + headers: { get: () => 'application/json' } as any, + json: async () => ({ + sha: 'abc123', + target_ref: 'refs/notes/commits', + new_ref_sha: 'def789', + result: { success: true, status: 'ok' }, + }), + } as any); + }); + + const appendResult = await repo.appendNote({ sha: 'abc123', note: 'note append' }); + expect(appendResult.targetRef).toBe('refs/notes/commits'); + + mockFetch.mockImplementationOnce((url, init) => { + expect(init?.method).toBe('DELETE'); + const requestUrl = new URL(url as string); + expect(requestUrl.pathname.endsWith('/repos/notes')).toBe(true); + expect(init?.body).toBeDefined(); + const payload = JSON.parse(init?.body as string); + expect(payload).toEqual({ sha: 'abc123' }); + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + headers: { get: () => 'application/json' } as any, + json: async () => ({ + sha: 'abc123', + target_ref: 'refs/notes/commits', + new_ref_sha: 'def456', + result: { success: true, status: 'ok' }, + }), + } as any); + }); + + const deleteResult = await repo.deleteNote({ sha: 'abc123' }); + expect(deleteResult.targetRef).toBe('refs/notes/commits'); + }); + + it('passes 
ephemeral flag to getFileStream', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'repo-ephemeral-file' }); + + mockFetch.mockImplementationOnce((url, init) => { + expect(init?.method).toBe('GET'); + const requestUrl = new URL(url as string); + expect(requestUrl.pathname.endsWith('/repos/file')).toBe(true); + expect(requestUrl.searchParams.get('path')).toBe('docs/readme.md'); + expect(requestUrl.searchParams.get('ref')).toBe('feature/demo'); + expect(requestUrl.searchParams.get('ephemeral')).toBe('true'); + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + headers: { get: () => null } as any, + json: async () => ({}), + text: async () => '', + } as any); + }); + + const response = await repo.getFileStream({ + path: 'docs/readme.md', + ref: 'feature/demo', + ephemeral: true, + }); + + expect(response.ok).toBe(true); + expect(response.status).toBe(200); + }); + + it('passes ephemeral flag to listFiles', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'repo-ephemeral-list' }); + + mockFetch.mockImplementationOnce((url, init) => { + expect(init?.method).toBe('GET'); + const requestUrl = new URL(url as string); + expect(requestUrl.pathname.endsWith('/repos/files')).toBe(true); + expect(requestUrl.searchParams.get('ref')).toBe('feature/demo'); + expect(requestUrl.searchParams.get('ephemeral')).toBe('true'); + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + headers: { get: () => null } as any, + json: async () => ({ + paths: ['docs/readme.md'], + ref: 'refs/namespaces/ephemeral/refs/heads/feature/demo', + }), + text: async () => '', + } as any); + }); + + const result = await repo.listFiles({ + ref: 'feature/demo', + ephemeral: true, + }); + + expect(result.paths).toEqual(['docs/readme.md']); + expect(result.ref).toBe('refs/namespaces/ephemeral/refs/heads/feature/demo'); + }); + + it('posts grep 
request body and parses response', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'repo-grep' }); + + mockFetch.mockImplementationOnce((url, init) => { + expect(init?.method).toBe('POST'); + const requestUrl = new URL(url as string); + expect(requestUrl.pathname.endsWith('/repos/grep')).toBe(true); + + const body = JSON.parse(String(init?.body ?? '{}')); + expect(body).toEqual({ + rev: 'main', + paths: ['src/'], + query: { pattern: 'SEARCHME', case_sensitive: false }, + context: { before: 1, after: 2 }, + limits: { max_lines: 5, max_matches_per_file: 7 }, + pagination: { cursor: 'abc', limit: 3 }, + file_filters: { include_globs: ['**/*.ts'], exclude_globs: ['**/vendor/**'] }, + }); + + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + headers: { get: () => null } as any, + json: async () => ({ + query: { pattern: 'SEARCHME', case_sensitive: false }, + repo: { ref: 'main', commit: 'deadbeef' }, + matches: [ + { + path: 'src/a.ts', + lines: [{ line_number: 12, text: 'SEARCHME', type: 'match' }], + }, + ], + next_cursor: null, + has_more: false, + }), + text: async () => '', + } as any); + }); + + const result = await repo.grep({ + ref: 'main', + paths: ['src/'], + query: { pattern: 'SEARCHME', caseSensitive: false }, + fileFilters: { includeGlobs: ['**/*.ts'], excludeGlobs: ['**/vendor/**'] }, + context: { before: 1, after: 2 }, + limits: { maxLines: 5, maxMatchesPerFile: 7 }, + pagination: { cursor: 'abc', limit: 3 }, + }); + + expect(result.query).toEqual({ pattern: 'SEARCHME', caseSensitive: false }); + expect(result.repo).toEqual({ ref: 'main', commit: 'deadbeef' }); + expect(result.matches).toEqual([ + { + path: 'src/a.ts', + lines: [{ lineNumber: 12, text: 'SEARCHME', type: 'match' }], + }, + ]); + expect(result.nextCursor).toBeUndefined(); + expect(result.hasMore).toBe(false); + }); + + describe('createRepo', () => { + it('should return a repo with id and getRemoteURL 
function', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + + expect(repo).toBeDefined(); + expect(repo.id).toMatch(/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/); // UUID format + expect(repo.getRemoteURL).toBeInstanceOf(Function); + + const url = await repo.getRemoteURL(); + expect(url).toMatch( + new RegExp(`^https:\\/\\/t:.+@v0\\.3p\\.pierre\\.rip\\/${repo.id}\\.git$`), + ); + expect(url).toContain('eyJ'); // JWT should contain base64 encoded content + }); + + it('should accept options for getRemoteURL', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + + // Test with permissions and ttl + const url = await repo.getRemoteURL({ + permissions: ['git:write', 'git:read'], + ttl: 3600, + }); + expect(url).toMatch( + new RegExp(`^https:\\/\\/t:.+@v0\\.3p\\.pierre\\.rip\\/${repo.id}\\.git$`), + ); + expect(url).toContain('eyJ'); // JWT should contain base64 encoded content + }); + + it('should return ephemeral remote URL with +ephemeral suffix', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + + const url = await repo.getEphemeralRemoteURL(); + expect(url).toMatch( + new RegExp(`^https:\\/\\/t:.+@v0\\.3p\\.pierre\\.rip\\/${repo.id}\\+ephemeral\\.git$`), + ); + expect(url).toContain('eyJ'); // JWT should contain base64 encoded content + expect(url).toContain('+ephemeral.git'); + }); + + it('should accept options for getEphemeralRemote', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + + // Test with permissions and ttl + const url = await repo.getEphemeralRemoteURL({ + permissions: ['git:write', 'git:read'], + ttl: 3600, + }); + expect(url).toMatch( + new RegExp(`^https:\\/\\/t:.+@v0\\.3p\\.pierre\\.rip\\/${repo.id}\\+ephemeral\\.git$`), + ); + expect(url).toContain('eyJ'); // JWT should contain base64 
encoded content + }); + + it('getRemoteURL and getEphemeralRemote should return different URLs', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + + const defaultURL = await repo.getRemoteURL(); + const ephemeralURL = await repo.getEphemeralRemoteURL(); + + expect(defaultURL).not.toBe(ephemeralURL); + expect(defaultURL).toContain(`${repo.id}.git`); + expect(ephemeralURL).toContain(`${repo.id}+ephemeral.git`); + expect(ephemeralURL).not.toContain(`${repo.id}.git`); + }); + + it('should use provided id instead of generating UUID', async () => { + const store = new GitStorage({ name: 'v0', key }); + const customName = 'my-custom-repo-name'; + const repo = await store.createRepo({ id: customName }); + + expect(repo.id).toBe(customName); + + const url = await repo.getRemoteURL(); + expect(url).toContain(`/${customName}.git`); + }); + + it('should send baseRepo configuration with default defaultBranch when only baseRepo is provided', async () => { + const store = new GitStorage({ name: 'v0', key }); + const baseRepo = { + provider: 'github' as const, + owner: 'octocat', + name: 'hello-world', + defaultBranch: 'main', + }; + + await store.createRepo({ baseRepo }); + + // Check that fetch was called with baseRepo and default defaultBranch + expect(mockFetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + method: 'POST', + body: JSON.stringify({ + base_repo: { + provider: 'github', + owner: 'octocat', + name: 'hello-world', + default_branch: 'main', + }, + default_branch: 'main', + }), + }), + ); + }); + + it('should send both baseRepo and custom defaultBranch when both are provided', async () => { + const store = new GitStorage({ name: 'v0', key }); + const baseRepo = { + provider: 'github' as const, + owner: 'octocat', + name: 'hello-world', + }; + const defaultBranch = 'develop'; + + await store.createRepo({ baseRepo, defaultBranch }); + + // Check that fetch was called with the 
correct body + expect(mockFetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + method: 'POST', + body: JSON.stringify({ + base_repo: { + provider: 'github', + owner: 'octocat', + name: 'hello-world', + }, + default_branch: defaultBranch, + }), + }), + ); + }); + + it('should send fork baseRepo configuration with auth token', async () => { + const store = new GitStorage({ name: 'v0', key }); + const baseRepo = { + id: 'template-repo', + ref: 'develop', + }; + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'forked-repo', url: 'https://test.code.storage/repo.git' }), + }); + + const repo = await store.createRepo({ baseRepo }); + + expect(repo.defaultBranch).toBe('main'); + + const requestBody = JSON.parse((mockFetch.mock.calls[0][1] as RequestInit).body as string); + expect(requestBody.default_branch).toBeUndefined(); + expect(requestBody.base_repo).toEqual( + expect.objectContaining({ + provider: 'code', + owner: 'v0', + name: 'template-repo', + operation: 'fork', + ref: 'develop', + }), + ); + expect(requestBody.base_repo.auth?.token).toBeTruthy(); + + const payload = decodeJwtPayload(requestBody.base_repo.auth.token); + expect(payload.repo).toBe('template-repo'); + expect(payload.scopes).toEqual(['git:read']); + }); + + it('should default defaultBranch to "main" when not provided', async () => { + const store = new GitStorage({ name: 'v0', key }); + + await store.createRepo({}); + + // Check that fetch was called with default defaultBranch of 'main' + expect(mockFetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + method: 'POST', + body: JSON.stringify({ + default_branch: 'main', + }), + }), + ); + }); + + it('should use custom defaultBranch when explicitly provided', async () => { + const store = new GitStorage({ name: 'v0', key }); + const customBranch = 'develop'; + + await store.createRepo({ defaultBranch: customBranch }); + + // Check 
that fetch was called with the custom defaultBranch + expect(mockFetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + method: 'POST', + body: JSON.stringify({ + default_branch: customBranch, + }), + }), + ); + }); + + it('should handle repository already exists error', async () => { + const store = new GitStorage({ name: 'v0', key }); + + // Mock a 409 Conflict response + mockFetch.mockResolvedValue({ + ok: false, + status: 409, + statusText: 'Conflict', + }); + + await expect(store.createRepo({ id: 'existing-repo' })).rejects.toThrow( + 'Repository already exists', + ); + }); + }); + + describe('listRepos', () => { + it('should fetch repositories with org:read scope', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce((url, init) => { + expect(init?.method).toBe('GET'); + expect(url).toBe('https://api.v0.3p.pierre.rip/api/v1/repos'); + + const headers = init?.headers as Record; + const payload = decodeJwtPayload(stripBearer(headers.Authorization)); + expect(payload.scopes).toEqual(['org:read']); + expect(payload.repo).toBe('org'); + + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repos: [ + { + repo_id: 'repo-1', + url: 'owner/repo-1', + default_branch: 'main', + created_at: '2024-01-01T00:00:00Z', + base_repo: { provider: 'github', owner: 'owner', name: 'repo-1' }, + }, + ], + next_cursor: null, + has_more: false, + }), + }); + }); + + const result = await store.listRepos(); + expect(result.repos).toHaveLength(1); + expect(result.repos[0].repoId).toBe('repo-1'); + expect(result.hasMore).toBe(false); + }); + + it('should pass cursor and limit params', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce((url) => { + const requestUrl = new URL(url as string); + expect(requestUrl.pathname.endsWith('/api/v1/repos')).toBe(true); + 
expect(requestUrl.searchParams.get('cursor')).toBe('cursor-1'); + expect(requestUrl.searchParams.get('limit')).toBe('25'); + + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repos: [], + next_cursor: null, + has_more: false, + }), + }); + }); + + await store.listRepos({ cursor: 'cursor-1', limit: 25 }); + }); + }); + + describe('findOne', () => { + it('should return a repo with getRemoteURL function when found', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repoId = 'test-repo-id'; + const repo = await store.findOne({ id: repoId }); + + expect(repo).toBeDefined(); + expect(repo?.id).toBe(repoId); + expect(repo?.getRemoteURL).toBeInstanceOf(Function); + + const url = await repo?.getRemoteURL(); + expect(url).toMatch(/^https:\/\/t:.+@v0\.3p\.pierre\.rip\/test-repo-id\.git$/); + expect(url).toContain('eyJ'); // JWT should contain base64 encoded content + }); + + it('should handle getRemoteURL with options', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.findOne({ id: 'test-repo-id' }); + + expect(repo).toBeDefined(); + const url = await repo?.getRemoteURL({ + permissions: ['git:read'], + ttl: 7200, + }); + expect(url).toMatch(/^https:\/\/t:.+@v0\.3p\.pierre\.rip\/test-repo-id\.git$/); + expect(url).toContain('eyJ'); // JWT should contain base64 encoded content + }); + }); + + describe('deleteRepo', () => { + it('should delete a repository and return the result', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repoId = 'test-repo-to-delete'; + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repo_id: repoId, + message: `Repository ${repoId} deletion initiated. 
Physical storage cleanup will complete asynchronously.`, + }), + } as any); + + const result = await store.deleteRepo({ id: repoId }); + + expect(result.repoId).toBe(repoId); + expect(result.message).toContain('deletion initiated'); + }); + + it('should send DELETE request with repo:write scope', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repoId = 'test-repo-delete-scope'; + + mockFetch.mockImplementationOnce((url, init) => { + expect(init?.method).toBe('DELETE'); + expect(url).toBe('https://api.v0.3p.pierre.rip/api/v1/repos/delete'); + + const headers = init?.headers as Record; + expect(headers.Authorization).toMatch(/^Bearer /); + + const payload = decodeJwtPayload(stripBearer(headers.Authorization)); + expect(payload.scopes).toEqual(['repo:write']); + expect(payload.repo).toBe(repoId); + + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repo_id: repoId, + message: 'Repository deletion initiated.', + }), + }); + }); + + await store.deleteRepo({ id: repoId }); + }); + + it('should throw error when repository not found', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockResolvedValueOnce({ + ok: false, + status: 404, + statusText: 'Not Found', + } as any); + + await expect(store.deleteRepo({ id: 'non-existent-repo' })).rejects.toThrow( + 'Repository not found', + ); + }); + + it('should throw error when repository already deleted', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockResolvedValueOnce({ + ok: false, + status: 409, + statusText: 'Conflict', + } as any); + + await expect(store.deleteRepo({ id: 'already-deleted-repo' })).rejects.toThrow( + 'Repository already deleted', + ); + }); + + it('should honor ttl option', async () => { + const store = new GitStorage({ name: 'v0', key }); + const customTTL = 300; + + mockFetch.mockImplementationOnce((_url, init) => { + const headers = init?.headers as Record; + 
const payload = decodeJwtPayload(stripBearer(headers.Authorization)); + expect(payload.exp - payload.iat).toBe(customTTL); + + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repo_id: 'test-repo', + message: 'Repository deletion initiated.', + }), + }); + }); + + await store.deleteRepo({ id: 'test-repo', ttl: customTTL }); + }); + }); + + describe('Repo createBranch', () => { + it('posts to create branch endpoint and returns parsed result', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'repo-create-branch' }); + + mockFetch.mockImplementationOnce((url, init) => { + expect(url).toBe('https://api.v0.3p.pierre.rip/api/v1/repos/branches/create'); + + const requestInit = init as RequestInit; + expect(requestInit.method).toBe('POST'); + + const headers = requestInit.headers as Record; + expect(headers.Authorization).toMatch(/^Bearer /); + expect(headers['Content-Type']).toBe('application/json'); + + const body = JSON.parse(requestInit.body as string); + expect(body).toEqual({ + base_branch: 'main', + base_is_ephemeral: true, + target_branch: 'feature/demo', + target_is_ephemeral: true, + }); + + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + message: 'branch created', + target_branch: 'feature/demo', + target_is_ephemeral: true, + commit_sha: 'abc123', + }), + } as any); + }); + + const result = await repo.createBranch({ + baseBranch: 'main', + targetBranch: 'feature/demo', + baseIsEphemeral: true, + targetIsEphemeral: true, + }); + + expect(result).toEqual({ + message: 'branch created', + targetBranch: 'feature/demo', + targetIsEphemeral: true, + commitSha: 'abc123', + }); + }); + + it('honors ttl override when creating a branch', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'repo-create-branch-ttl' }); + + 
mockFetch.mockImplementationOnce((_url, init) => { + const requestInit = init as RequestInit; + const headers = requestInit.headers as Record; + const payload = decodeJwtPayload(stripBearer(headers.Authorization)); + expect(payload.scopes).toEqual(['git:write']); + expect(payload.exp - payload.iat).toBe(600); + + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + message: 'branch created', + target_branch: 'feature/demo', + target_is_ephemeral: false, + }), + } as any); + }); + + const result = await repo.createBranch({ + baseBranch: 'main', + targetBranch: 'feature/demo', + ttl: 600, + }); + + expect(result).toEqual({ + message: 'branch created', + targetBranch: 'feature/demo', + targetIsEphemeral: false, + commitSha: undefined, + }); + }); + + it('requires both base and target branches', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'repo-create-branch-validation' }); + + await expect( + repo.createBranch({ baseBranch: '', targetBranch: 'feature/demo' }), + ).rejects.toThrow('createBranch baseBranch is required'); + + await expect(repo.createBranch({ baseBranch: 'main', targetBranch: '' })).rejects.toThrow( + 'createBranch targetBranch is required', + ); + }); + }); + + describe('Repo getBranchDiff', () => { + it('forwards ephemeralBase flag to the API params', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'repo-branch-diff-ephemeral-base' }); + + mockFetch.mockImplementationOnce((url) => { + const requestUrl = new URL(url as string); + expect(requestUrl.searchParams.get('branch')).toBe('refs/heads/feature/demo'); + expect(requestUrl.searchParams.get('base')).toBe('refs/heads/main'); + expect(requestUrl.searchParams.get('ephemeral_base')).toBe('true'); + + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + branch: 'refs/heads/feature/demo', + 
base: 'refs/heads/main', + stats: { files: 1, additions: 1, deletions: 0, changes: 1 }, + files: [ + { + path: 'README.md', + state: 'modified', + old_path: null, + raw: '@@', + bytes: 10, + is_eof: true, + }, + ], + filtered_files: [], + }), + } as any); + }); + + const result = await repo.getBranchDiff({ + branch: 'refs/heads/feature/demo', + base: 'refs/heads/main', + ephemeralBase: true, + }); + + expect(result.branch).toBe('refs/heads/feature/demo'); + expect(result.base).toBe('refs/heads/main'); + }); + }); + + describe('Repo restoreCommit', () => { + it('should post metadata to the restore endpoint and return the response', async () => { + const store = new GitStorage({ name: 'v0', key }); + + const createRepoResponse = { + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'test-repo-id', url: 'https://test.code.storage/repo.git' }), + }; + + const restoreResponse = { + commit: { + commit_sha: 'abcdef0123456789abcdef0123456789abcdef01', + tree_sha: 'fedcba9876543210fedcba9876543210fedcba98', + target_branch: 'main', + pack_bytes: 1024, + }, + result: { + branch: 'main', + old_sha: '0123456789abcdef0123456789abcdef01234567', + new_sha: '89abcdef0123456789abcdef0123456789abcdef', + success: true, + status: 'ok', + }, + }; + + mockFetch.mockResolvedValueOnce(createRepoResponse as any); + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 201, + statusText: 'Created', + json: async () => restoreResponse, + } as any); + + const repo = await store.createRepo({}); + const response = await repo.restoreCommit({ + targetBranch: 'main', + expectedHeadSha: 'main', + targetCommitSha: '0123456789abcdef0123456789abcdef01234567', + commitMessage: 'Restore "feature"', + author: { + name: 'Author Name', + email: 'author@example.com', + }, + committer: { + name: 'Committer Name', + email: 'committer@example.com', + }, + }); + + expect(response).toEqual({ + commitSha: 'abcdef0123456789abcdef0123456789abcdef01', + treeSha: 
'fedcba9876543210fedcba9876543210fedcba98', + targetBranch: 'main', + packBytes: 1024, + refUpdate: { + branch: 'main', + oldSha: '0123456789abcdef0123456789abcdef01234567', + newSha: '89abcdef0123456789abcdef0123456789abcdef', + }, + }); + + const [, restoreCall] = mockFetch.mock.calls; + expect(restoreCall[0]).toBe('https://api.v0.3p.pierre.rip/api/v1/repos/restore-commit'); + const requestInit = restoreCall[1] as RequestInit; + expect(requestInit.method).toBe('POST'); + expect(requestInit.headers).toMatchObject({ + Authorization: expect.stringMatching(/^Bearer\s.+/), + 'Content-Type': 'application/json', + }); + + const parsedBody = JSON.parse(requestInit.body as string); + expect(parsedBody).toEqual({ + metadata: { + target_branch: 'main', + expected_head_sha: 'main', + target_commit_sha: '0123456789abcdef0123456789abcdef01234567', + commit_message: 'Restore "feature"', + author: { + name: 'Author Name', + email: 'author@example.com', + }, + committer: { + name: 'Committer Name', + email: 'committer@example.com', + }, + }, + }); + }); + + it('throws RefUpdateError when restore fails with a conflict response', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'test-repo-id', url: 'https://test.code.storage/repo.git' }), + } as any); + + mockFetch.mockResolvedValueOnce({ + ok: false, + status: 409, + statusText: 'Conflict', + json: async () => ({ + commit: { + commit_sha: 'cafefeedcafefeedcafefeedcafefeedcafefeed', + tree_sha: 'feedfacefeedfacefeedfacefeedfacefeedface', + target_branch: 'main', + pack_bytes: 0, + }, + result: { + branch: 'main', + old_sha: '0123456789abcdef0123456789abcdef01234567', + new_sha: 'cafefeedcafefeedcafefeedcafefeedcafefeed', + success: false, + status: 'precondition_failed', + message: 'branch moved', + }, + }), + } as any); + + const repo = await store.createRepo({}); + + await expect( + 
repo.restoreCommit({ + targetBranch: 'main', + expectedHeadSha: 'main', + targetCommitSha: '0123456789abcdef0123456789abcdef01234567', + author: { name: 'Author Name', email: 'author@example.com' }, + }), + ).rejects.toMatchObject({ + name: 'RefUpdateError', + message: 'branch moved', + status: 'precondition_failed', + reason: 'precondition_failed', + }); + }); + + it('throws RefUpdateError when restore returns an error payload without commit data', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'test-repo-id', url: 'https://test.code.storage/repo.git' }), + } as any); + + mockFetch.mockResolvedValueOnce({ + ok: false, + status: 412, + statusText: 'Precondition Failed', + json: async () => ({ + commit: null, + result: { + success: false, + status: 'precondition_failed', + message: 'expected head SHA mismatch', + }, + }), + } as any); + + const repo = await store.createRepo({}); + + await expect( + repo.restoreCommit({ + targetBranch: 'main', + expectedHeadSha: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', + targetCommitSha: 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb', + author: { name: 'Author', email: 'author@example.com' }, + }), + ).rejects.toMatchObject({ + name: 'RefUpdateError', + message: 'expected head SHA mismatch', + status: 'precondition_failed', + reason: 'precondition_failed', + }); + }); + + it('surfaces 404 when restore-commit endpoint is unavailable', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'test-repo-id', url: 'https://test.code.storage/repo.git' }), + } as any); + + mockFetch.mockResolvedValueOnce({ + ok: false, + status: 404, + statusText: 'Not Found', + json: async () => ({ error: 'not found' }), + } as any); + + const repo = await store.createRepo({}); + + await expect( + 
repo.restoreCommit({ + targetBranch: 'main', + targetCommitSha: '0123456789abcdef0123456789abcdef01234567', + author: { + name: 'Author Name', + email: 'author@example.com', + }, + }), + ).rejects.toMatchObject({ + name: 'RefUpdateError', + message: expect.stringContaining('HTTP 404'), + status: expect.any(String), + }); + }); + }); + + describe('createClient', () => { + it('should create a GitStorage instance', () => { + const client = createClient({ name: 'v0', key }); + expect(client).toBeInstanceOf(GitStorage); + }); + }); + + describe('CodeStorage alias', () => { + it('should be the same class as GitStorage', () => { + expect(CodeStorage).toBe(GitStorage); + }); + + it('should create a CodeStorage instance', () => { + const store = new CodeStorage({ name: 'v0', key }); + expect(store).toBeInstanceOf(CodeStorage); + expect(store).toBeInstanceOf(GitStorage); + }); + + it('should work identically to GitStorage', async () => { + const store = new CodeStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'test-repo' }); + + expect(repo).toBeDefined(); + expect(repo.id).toBe('test-repo'); + + const url = await repo.getRemoteURL(); + expect(url).toMatch(/^https:\/\/t:.+@v0\.3p\.pierre\.rip\/test-repo\.git$/); + }); + }); + + describe('JWT Generation', () => { + const extractJWT = (url: string): string => { + const match = url.match(/https:\/\/t:(.+)@v0\.3p\.pierre\.rip\/.+\.git/); + if (!match) throw new Error('JWT not found in URL'); + return match[1]; + }; + + it('should generate JWT with correct payload structure', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + const url = await repo.getRemoteURL(); + + const jwt = extractJWT(url); + const payload = decodeJwtPayload(jwt); + + expect(payload).toHaveProperty('iss', 'v0'); + expect(payload).toHaveProperty('sub', '@pierre/storage'); + expect(payload).toHaveProperty('repo', repo.id); + expect(payload).toHaveProperty('scopes'); + 
expect(payload).toHaveProperty('iat'); + expect(payload).toHaveProperty('exp'); + expect(payload.exp).toBeGreaterThan(payload.iat); + }); + + it('should generate JWT with default permissions and TTL', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + const url = await repo.getRemoteURL(); + + const jwt = extractJWT(url); + const payload = decodeJwtPayload(jwt); + + expect(payload.scopes).toEqual(['git:write', 'git:read']); + // Default TTL is 1 year (365 * 24 * 60 * 60 = 31536000 seconds) + expect(payload.exp - payload.iat).toBe(31536000); + }); + + it('should generate JWT with custom permissions and TTL', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + const customTTL = 7200; // 2 hours + const customPermissions = ['git:read' as const]; + + const url = await repo.getRemoteURL({ + permissions: customPermissions, + ttl: customTTL, + }); + + const jwt = extractJWT(url); + const payload = decodeJwtPayload(jwt); + + expect(payload.scopes).toEqual(customPermissions); + expect(payload.exp - payload.iat).toBe(customTTL); + }); + + it('respects ttl option for getRemoteURL', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + const legacyTTL = 1800; + + const url = await repo.getRemoteURL({ + ttl: legacyTTL, + }); + + const jwt = extractJWT(url); + const payload = decodeJwtPayload(jwt); + + expect(payload.exp - payload.iat).toBe(legacyTTL); + }); + + it('should generate valid JWT signature that can be verified', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + const url = await repo.getRemoteURL(); + + const jwt = extractJWT(url); + const importedKey = await importPKCS8(key, 'ES256'); + + // This should not throw if the signature is valid + const { payload } = await jwtVerify(jwt, importedKey); + + expect(payload.iss).toBe('v0'); 
+ expect(payload.repo).toBe(repo.id); + }); + + it('should generate different JWTs for different repos', async () => { + const store = new GitStorage({ name: 'v0', key }); + + const repo1 = await store.findOne({ id: 'repo-1' }); + const repo2 = await store.findOne({ id: 'repo-2' }); + + const url1 = await repo1?.getRemoteURL(); + const url2 = await repo2?.getRemoteURL(); + + const jwt1 = extractJWT(url1!); + const jwt2 = extractJWT(url2!); + + const payload1 = decodeJwtPayload(jwt1); + const payload2 = decodeJwtPayload(jwt2); + + expect(payload1.repo).toBe('repo-1'); + expect(payload2.repo).toBe('repo-2'); + expect(jwt1).not.toBe(jwt2); + }); + + it('should include repo ID in URL path and JWT payload', async () => { + const store = new GitStorage({ name: 'v0', key }); + const customRepoId = 'my-custom-repo'; + + const repo = await store.findOne({ id: customRepoId }); + const url = await repo?.getRemoteURL(); + + // Check URL contains repo ID + expect(url).toContain(`/${customRepoId}.git`); + + // Check JWT payload contains repo ID + const jwt = extractJWT(url!); + const payload = decodeJwtPayload(jwt); + expect(payload.repo).toBe(customRepoId); + }); + }); + + describe('API Methods', () => { + describe('deprecated ttl support', () => { + it('uses deprecated ttl when listing files', async () => { + const store = new GitStorage({ name: 'v0', key }); + const legacyTTL = 900; + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'legacy-ttl', url: 'https://repo.git' }), + }), + ); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ paths: [], ref: 'main' }), + }), + ); + + const repo = await store.createRepo({ id: 'legacy-ttl' }); + await repo.listFiles({ ttl: legacyTTL }); + + const lastCall = mockFetch.mock.calls[mockFetch.mock.calls.length - 1]; + const init = lastCall?.[1] as RequestInit | 
undefined; + const headers = init?.headers as Record | undefined; + expect(headers?.Authorization).toBeDefined(); + const payload = decodeJwtPayload(stripBearer(headers!.Authorization)); + expect(payload.exp - payload.iat).toBe(legacyTTL); + }); + }); + }); + + describe('Code-Storage-Agent Header', () => { + it('should include Code-Storage-Agent header in createRepo API calls', async () => { + let capturedHeaders: Record | undefined; + mockFetch.mockImplementationOnce((_url, init) => { + capturedHeaders = init?.headers as Record; + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repo_id: 'test-repo-id', + url: 'https://test.code.storage/repo.git', + }), + }); + }); + + const store = new GitStorage({ name: 'v0', key }); + await store.createRepo({ id: 'test-repo' }); + + expect(capturedHeaders).toBeDefined(); + expect(capturedHeaders?.['Code-Storage-Agent']).toBeDefined(); + expect(capturedHeaders?.['Code-Storage-Agent']).toMatch(/code-storage-sdk\/\d+\.\d+\.\d+/); + }); + + it('should include Code-Storage-Agent header in listCommits API calls', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'test-commits' }); + + let capturedHeaders: Record | undefined; + mockFetch.mockImplementationOnce((_url, init) => { + capturedHeaders = init?.headers as Record; + return Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ + commits: [], + next_cursor: undefined, + has_more: false, + }), + }); + }); + + await repo.listCommits(); + + expect(capturedHeaders).toBeDefined(); + expect(capturedHeaders?.['Code-Storage-Agent']).toBeDefined(); + expect(capturedHeaders?.['Code-Storage-Agent']).toMatch(/code-storage-sdk\/\d+\.\d+\.\d+/); + }); + + it('should include Code-Storage-Agent header in createBranch API calls', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'test-branch' }); + + let 
capturedHeaders: Record | undefined; + mockFetch.mockImplementationOnce((_url, init) => { + capturedHeaders = init?.headers as Record; + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + message: 'branch created', + target_branch: 'feature/test', + target_is_ephemeral: false, + }), + } as any); + }); + + await repo.createBranch({ + baseBranch: 'main', + targetBranch: 'feature/test', + }); + + expect(capturedHeaders).toBeDefined(); + expect(capturedHeaders?.['Code-Storage-Agent']).toBeDefined(); + expect(capturedHeaders?.['Code-Storage-Agent']).toMatch(/code-storage-sdk\/\d+\.\d+\.\d+/); + }); + }); + + describe('URL Generation', () => { + describe('getDefaultAPIBaseUrl', () => { + it('should insert name into API base URL', () => { + // Assuming API_BASE_URL is 'https://api.3p.pierre.rip' + const result = GitStorage.getDefaultAPIBaseUrl('v0'); + expect(result).toBe('https://api.v0.3p.pierre.rip'); + }); + + it('should work with different names', () => { + const result1 = GitStorage.getDefaultAPIBaseUrl('v1'); + expect(result1).toBe('https://api.v1.3p.pierre.rip'); + + const result2 = GitStorage.getDefaultAPIBaseUrl('prod'); + expect(result2).toBe('https://api.prod.3p.pierre.rip'); + }); + }); + + describe('getDefaultStorageBaseUrl', () => { + it('should prepend name to storage base URL', () => { + // Assuming STORAGE_BASE_URL is '3p.pierre.rip' + const result = GitStorage.getDefaultStorageBaseUrl('v0'); + expect(result).toBe('v0.3p.pierre.rip'); + }); + + it('should work with different names', () => { + const result1 = GitStorage.getDefaultStorageBaseUrl('v1'); + expect(result1).toBe('v1.3p.pierre.rip'); + + const result2 = GitStorage.getDefaultStorageBaseUrl('prod'); + expect(result2).toBe('prod.3p.pierre.rip'); + }); + }); + + describe('URL construction with default values', () => { + it('should use getDefaultAPIBaseUrl when apiBaseUrl is not provided', async () => { + const store = new GitStorage({ name: 'v0', key 
}); + await store.createRepo({ id: 'test-repo' }); + + // Check that the API calls use the default API base URL with name inserted + expect(mockFetch).toHaveBeenCalledWith( + expect.stringContaining('api.v0.3p.pierre.rip'), + expect.any(Object), + ); + }); + + it('should use getDefaultStorageBaseUrl for remote URLs when storageBaseUrl is not provided', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'test-repo' }); + + const url = await repo.getRemoteURL(); + expect(url).toMatch(/^https:\/\/t:.+@v0\.3p\.pierre\.rip\/test-repo\.git$/); + }); + + it('should use getDefaultStorageBaseUrl for ephemeral remote URLs when storageBaseUrl is not provided', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'test-repo' }); + + const url = await repo.getEphemeralRemoteURL(); + expect(url).toMatch(/^https:\/\/t:.+@v0\.3p\.pierre\.rip\/test-repo\+ephemeral\.git$/); + }); + }); + + describe('URL construction with custom values', () => { + it('should use custom apiBaseUrl when provided', async () => { + const customApiBaseUrl = 'custom-api.example.com'; + const store = new GitStorage({ name: 'v0', key, apiBaseUrl: customApiBaseUrl }); + await store.createRepo({ id: 'test-repo' }); + + // Check that the API calls use the custom API base URL + expect(mockFetch).toHaveBeenCalledWith( + expect.stringContaining(customApiBaseUrl), + expect.any(Object), + ); + }); + + it('should use custom storageBaseUrl for remote URLs when provided', async () => { + const customStorageBaseUrl = 'custom-storage.example.com'; + const store = new GitStorage({ name: 'v0', key, storageBaseUrl: customStorageBaseUrl }); + const repo = await store.createRepo({ id: 'test-repo' }); + + const url = await repo.getRemoteURL(); + expect(url).toMatch(/^https:\/\/t:.+@custom-storage\.example\.com\/test-repo\.git$/); + }); + + it('should use custom storageBaseUrl for ephemeral remote URLs when 
provided', async () => { + const customStorageBaseUrl = 'custom-storage.example.com'; + const store = new GitStorage({ name: 'v0', key, storageBaseUrl: customStorageBaseUrl }); + const repo = await store.createRepo({ id: 'test-repo' }); + + const url = await repo.getEphemeralRemoteURL(); + expect(url).toMatch( + /^https:\/\/t:.+@custom-storage\.example\.com\/test-repo\+ephemeral\.git$/, + ); + }); + + it('should use custom apiBaseUrl in createCommit transport', async () => { + const customApiBaseUrl = 'custom-api.example.com'; + const store = new GitStorage({ name: 'v0', key, apiBaseUrl: customApiBaseUrl }); + const repo = await store.createRepo({ id: 'test-repo' }); + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + commit: { + commit_sha: 'abc123', + tree_sha: 'def456', + target_branch: 'main', + pack_bytes: 1024, + blob_count: 1, + }, + result: { + branch: 'main', + old_sha: 'old123', + new_sha: 'new456', + success: true, + status: 'ok', + }, + }), + } as any); + + const builder = repo.createCommit({ + targetBranch: 'main', + author: { name: 'Test', email: 'test@example.com' }, + commitMessage: 'Test commit', + }); + + await builder.addFileFromString('test.txt', 'test content').send(); + + // Verify that the fetch was called with the custom API base URL + const lastCall = mockFetch.mock.calls[mockFetch.mock.calls.length - 1]; + expect(lastCall[0]).toContain(customApiBaseUrl); + }); + + it('should use custom apiBaseUrl in createCommitFromDiff', async () => { + const customApiBaseUrl = 'custom-api.example.com'; + const store = new GitStorage({ name: 'v0', key, apiBaseUrl: customApiBaseUrl }); + const repo = await store.createRepo({ id: 'test-repo' }); + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + commit: { + commit_sha: 'abc123', + tree_sha: 'def456', + target_branch: 'main', + pack_bytes: 1024, + blob_count: 1, + }, + result: { + branch: 
'main', + old_sha: 'old123', + new_sha: 'new456', + success: true, + status: 'ok', + }, + }), + } as any); + + await repo.createCommitFromDiff({ + targetBranch: 'main', + author: { name: 'Test', email: 'test@example.com' }, + commitMessage: 'Test commit', + diff: 'diff content', + }); + + // Verify that the fetch was called with the custom API base URL + const lastCall = mockFetch.mock.calls[mockFetch.mock.calls.length - 1]; + expect(lastCall[0]).toContain(customApiBaseUrl); + }); + }); + + describe('Different name values', () => { + it('should generate correct URLs for different name values', async () => { + const names = ['v0', 'v1', 'staging', 'prod']; + + for (const name of names) { + mockFetch.mockReset(); + mockFetch.mockResolvedValue({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'test-repo', url: 'https://test.code.storage/repo.git' }), + }); + + const store = new GitStorage({ name, key }); + const repo = await store.createRepo({ id: 'test-repo' }); + + const remoteUrl = await repo.getRemoteURL(); + expect(remoteUrl).toMatch( + new RegExp(`^https:\\/\\/t:.+@${name}\\.3p\\.pierre\\.rip\\/test-repo\\.git$`), + ); + + const ephemeralUrl = await repo.getEphemeralRemoteURL(); + expect(ephemeralUrl).toMatch( + new RegExp( + `^https:\\/\\/t:.+@${name}\\.3p\\.pierre\\.rip\\/test-repo\\+ephemeral\\.git$`, + ), + ); + + // Check API calls use the correct URL + expect(mockFetch).toHaveBeenCalledWith( + expect.stringContaining(`api.${name}.3p.pierre.rip`), + expect.any(Object), + ); + } + }); + }); + }); +}); diff --git a/packages/git-storage-sdk-node/tests/version.test.ts b/packages/git-storage-sdk-node/tests/version.test.ts new file mode 100644 index 000000000..b81825a23 --- /dev/null +++ b/packages/git-storage-sdk-node/tests/version.test.ts @@ -0,0 +1,64 @@ +import { describe, expect, it } from 'vitest'; +import packageJson from '../package.json'; +import { getUserAgent, PACKAGE_NAME, PACKAGE_VERSION } from '../src/version'; + 
+describe('version', () => { + describe('PACKAGE_NAME', () => { + it('should export the correct package name', () => { + expect(PACKAGE_NAME).toBe('code-storage-sdk'); + }); + }); + + describe('PACKAGE_VERSION', () => { + it('should export the correct package version', () => { + expect(PACKAGE_VERSION).toBe(packageJson.version); + }); + + it('should follow semantic versioning format', () => { + // Check if version follows semver pattern (e.g., 0.3.0, 1.0.0, 1.2.3-beta.1) + const semverPattern = /^\d+\.\d+\.\d+(-[a-zA-Z0-9.-]+)?$/; + expect(PACKAGE_VERSION).toMatch(semverPattern); + }); + }); + + describe('getUserAgent', () => { + it('should return a valid user agent string', () => { + const userAgent = getUserAgent(); + expect(userAgent).toBeDefined(); + expect(typeof userAgent).toBe('string'); + }); + + it('should format user agent as "{name}/{version}"', () => { + const userAgent = getUserAgent(); + expect(userAgent).toBe(`${PACKAGE_NAME}/${PACKAGE_VERSION}`); + }); + + it('should return a non-empty string', () => { + const userAgent = getUserAgent(); + expect(userAgent.length).toBeGreaterThan(0); + }); + + it('should contain the package name', () => { + const userAgent = getUserAgent(); + expect(userAgent).toContain('code-storage-sdk'); + }); + + it('should contain the version number', () => { + const userAgent = getUserAgent(); + expect(userAgent).toContain(packageJson.version); + }); + + it('should return consistent value across multiple calls', () => { + const userAgent1 = getUserAgent(); + const userAgent2 = getUserAgent(); + expect(userAgent1).toBe(userAgent2); + }); + + it('should match the expected format pattern', () => { + const userAgent = getUserAgent(); + // Pattern: name/version + const pattern = /^[\w-]+\/\d+\.\d+\.\d+(-[a-zA-Z0-9.-]+)?$/; + expect(userAgent).toMatch(pattern); + }); + }); +}); diff --git a/packages/git-storage-sdk-node/tests/webhook.test.ts b/packages/git-storage-sdk-node/tests/webhook.test.ts new file mode 100644 index 
000000000..0c8c19714 --- /dev/null +++ b/packages/git-storage-sdk-node/tests/webhook.test.ts @@ -0,0 +1,370 @@ +import { describe, expect, it } from 'vitest'; +import { + parseSignatureHeader, + type RawWebhookPushEvent, + validateWebhook, + validateWebhookSignature, + type WebhookPushEvent, +} from '../src'; +import { createHmac } from '../src/util'; + +describe('Webhook Validation', () => { + const secret = 'test_webhook_secret_key_123'; + const rawPayload: RawWebhookPushEvent = { + repository: { + id: 'repo_abc123def456ghi789jkl', + url: 'https://git.example.com/org/repo', + }, + ref: 'main', + before: 'abc123000000000000000000000000000000000', + after: 'def456000000000000000000000000000000000', + customer_id: 'cust_xyz789mno456pqr123st', + pushed_at: '2024-01-20T10:30:00Z', + }; + const expectedPushPayload: WebhookPushEvent = { + type: 'push', + repository: { + id: 'repo_abc123def456ghi789jkl', + url: 'https://git.example.com/org/repo', + }, + ref: 'main', + before: 'abc123000000000000000000000000000000000', + after: 'def456000000000000000000000000000000000', + customerId: 'cust_xyz789mno456pqr123st', + pushedAt: new Date('2024-01-20T10:30:00Z'), + rawPushedAt: '2024-01-20T10:30:00Z', + }; + const payloadStr = JSON.stringify(rawPayload); + + // Helper to generate a valid signature + async function generateSignature( + payloadData: string, + webhookSecret: string, + timestamp?: number, + ): Promise<{ header: string; timestamp: number }> { + const ts = timestamp ?? 
Math.floor(Date.now() / 1000); + const signedData = `${ts}.${payloadData}`; + const signature = await createHmac('sha256', webhookSecret, signedData); + return { + header: `t=${ts},sha256=${signature}`, + timestamp: ts, + }; + } + + describe('parseSignatureHeader', () => { + it('should parse valid signature header', () => { + const header = 't=1234567890,sha256=abcdef123456'; + const result = parseSignatureHeader(header); + expect(result).toEqual({ + timestamp: '1234567890', + signature: 'abcdef123456', + }); + }); + + it('should handle header with spaces', () => { + const header = 't=1234567890, sha256=abcdef123456'; + const result = parseSignatureHeader(header); + expect(result).toEqual({ + timestamp: '1234567890', + signature: 'abcdef123456', + }); + }); + + it('should return null for invalid header format', () => { + expect(parseSignatureHeader('')).toBeNull(); + expect(parseSignatureHeader('invalid')).toBeNull(); + expect(parseSignatureHeader('t=123')).toBeNull(); // Missing signature + expect(parseSignatureHeader('sha256=abc')).toBeNull(); // Missing timestamp + expect(parseSignatureHeader('timestamp=123,signature=abc')).toBeNull(); // Wrong keys + }); + + it('should handle header with extra fields', () => { + const header = 't=1234567890,sha256=abcdef123456,v1=ignored'; + const result = parseSignatureHeader(header); + expect(result).toEqual({ + timestamp: '1234567890', + signature: 'abcdef123456', + }); + }); + }); + + describe('validateWebhookSignature', () => { + it('should validate correct signature', async () => { + const { header, timestamp } = await generateSignature(payloadStr, secret); + const result = await validateWebhookSignature(payloadStr, header, secret); + + expect(result).toEqual({ + valid: true, + timestamp, + }); + }); + + it('should validate with Buffer payload', async () => { + const { header, timestamp } = await generateSignature(payloadStr, secret); + const payloadBuffer = Buffer.from(payloadStr, 'utf8'); + const result = await 
validateWebhookSignature(payloadBuffer, header, secret); + + expect(result).toEqual({ + valid: true, + timestamp, + }); + }); + + it('should reject invalid signature', async () => { + const { header } = await generateSignature(payloadStr, 'wrong_secret'); + const result = await validateWebhookSignature(payloadStr, header, secret); + + expect(result.valid).toBe(false); + expect(result.error).toBe('Invalid signature'); + }); + + it('should reject old timestamp (replay protection)', async () => { + const oldTimestamp = Math.floor(Date.now() / 1000) - 400; // 400 seconds ago + const { header } = await generateSignature(payloadStr, secret, oldTimestamp); + const result = await validateWebhookSignature(payloadStr, header, secret); + + expect(result.valid).toBe(false); + expect(result.error).toMatch(/Webhook timestamp too old/); + expect(result.timestamp).toBe(oldTimestamp); + }); + + it('should reject future timestamp', async () => { + const futureTimestamp = Math.floor(Date.now() / 1000) + 120; // 2 minutes in future + const { header } = await generateSignature(payloadStr, secret, futureTimestamp); + const result = await validateWebhookSignature(payloadStr, header, secret); + + expect(result.valid).toBe(false); + expect(result.error).toBe('Webhook timestamp is in the future'); + expect(result.timestamp).toBe(futureTimestamp); + }); + + it('should allow disabling timestamp validation', async () => { + const oldTimestamp = Math.floor(Date.now() / 1000) - 3600; // 1 hour ago + const { header } = await generateSignature(payloadStr, secret, oldTimestamp); + const result = await validateWebhookSignature(payloadStr, header, secret, { + maxAgeSeconds: 0, + }); + + expect(result).toEqual({ + valid: true, + timestamp: oldTimestamp, + }); + }); + + it('should use custom max age', async () => { + const timestamp = Math.floor(Date.now() / 1000) - 60; // 60 seconds ago + const { header } = await generateSignature(payloadStr, secret, timestamp); + + // Should fail with 30 second max 
age + const result1 = await validateWebhookSignature(payloadStr, header, secret, { + maxAgeSeconds: 30, + }); + expect(result1.valid).toBe(false); + + // Should succeed with 120 second max age + const result2 = await validateWebhookSignature(payloadStr, header, secret, { + maxAgeSeconds: 120, + }); + expect(result2.valid).toBe(true); + }); + + it('should reject malformed signature header', async () => { + const result = await validateWebhookSignature(payloadStr, 'invalid_header', secret); + expect(result).toEqual({ + valid: false, + error: 'Invalid signature header format', + }); + }); + + it('should reject non-numeric timestamp', async () => { + const header = 't=not_a_number,sha256=abcdef123456'; + const result = await validateWebhookSignature(payloadStr, header, secret); + expect(result).toEqual({ + valid: false, + error: 'Invalid timestamp in signature', + }); + }); + + it('should handle different payload modifications', async () => { + const { header } = await generateSignature(payloadStr, secret); + + // Modified payload should fail + const modifiedPayload = payloadStr.replace('main', 'master'); + const result1 = await validateWebhookSignature(modifiedPayload, header, secret); + expect(result1.valid).toBe(false); + + // Extra whitespace should fail + const result2 = await validateWebhookSignature(payloadStr + ' ', header, secret); + expect(result2.valid).toBe(false); + + // Different encoding should work if content is same + const payloadBuffer = Buffer.from(payloadStr); + const result3 = await validateWebhookSignature(payloadBuffer, header, secret); + expect(result3.valid).toBe(true); + }); + }); + + describe('validateWebhook', () => { + it('should validate and parse webhook', async () => { + const { header, timestamp } = await generateSignature(payloadStr, secret); + const headers = { + 'x-pierre-signature': header, + 'x-pierre-event': 'push', + }; + + const result = await validateWebhook(payloadStr, headers, secret); + + expect(result.valid).toBe(true); + 
expect(result.eventType).toBe('push'); + expect(result.timestamp).toBe(timestamp); + expect(result.payload).toEqual(expectedPushPayload); + }); + + it('should handle uppercase headers', async () => { + const { header, timestamp } = await generateSignature(payloadStr, secret); + const headers = { + 'X-Pierre-Signature': header, + 'X-Pierre-Event': 'push', + }; + + const result = await validateWebhook(payloadStr, headers, secret); + + expect(result.valid).toBe(true); + expect(result.eventType).toBe('push'); + expect(result.timestamp).toBe(timestamp); + }); + + it('should reject missing signature header', async () => { + const headers = { + 'x-pierre-event': 'push', + }; + + const result = await validateWebhook(payloadStr, headers, secret); + + expect(result).toEqual({ + valid: false, + error: 'Missing or invalid X-Pierre-Signature header', + }); + }); + + it('should reject missing event header', async () => { + const { header } = await generateSignature(payloadStr, secret); + const headers = { + 'x-pierre-signature': header, + }; + + const result = await validateWebhook(payloadStr, headers, secret); + + expect(result).toEqual({ + valid: false, + error: 'Missing or invalid X-Pierre-Event header', + }); + }); + + it('should reject array headers', async () => { + const { header } = await generateSignature(payloadStr, secret); + + const headers1 = { + 'x-pierre-signature': [header, header], + 'x-pierre-event': 'push', + }; + const result1 = await validateWebhook(payloadStr, headers1, secret); + expect(result1.valid).toBe(false); + + const headers2 = { + 'x-pierre-signature': header, + 'x-pierre-event': ['push', 'push'], + }; + const result2 = await validateWebhook(payloadStr, headers2, secret); + expect(result2.valid).toBe(false); + }); + + it('should reject invalid JSON payload', async () => { + const invalidJson = 'not valid json'; + const { header } = await generateSignature(invalidJson, secret); + const headers = { + 'x-pierre-signature': header, + 'x-pierre-event': 
'push', + }; + + const result = await validateWebhook(invalidJson, headers, secret); + + expect(result.valid).toBe(false); + expect(result.error).toBe('Invalid JSON payload'); + }); + + it('should propagate signature validation errors', async () => { + const { header } = await generateSignature(payloadStr, 'wrong_secret'); + const headers = { + 'x-pierre-signature': header, + 'x-pierre-event': 'push', + }; + + const result = await validateWebhook(payloadStr, headers, secret); + + expect(result.valid).toBe(false); + expect(result.error).toBe('Invalid signature'); + }); + + it('should pass through validation options', async () => { + const oldTimestamp = Math.floor(Date.now() / 1000) - 3600; // 1 hour ago + const { header } = await generateSignature(payloadStr, secret, oldTimestamp); + const headers = { + 'x-pierre-signature': header, + 'x-pierre-event': 'push', + }; + + // Should fail with default max age + const result1 = await validateWebhook(payloadStr, headers, secret); + expect(result1.valid).toBe(false); + + // Should succeed with disabled timestamp validation + const result2 = await validateWebhook(payloadStr, headers, secret, { maxAgeSeconds: 0 }); + expect(result2.valid).toBe(true); + expect(result2.payload).toEqual(expectedPushPayload); + }); + }); + + describe('Security considerations', () => { + it('should use constant-time comparison', async () => { + // This test verifies the implementation uses timingSafeEqual + // by ensuring different length signatures are rejected before comparison + const { header } = await generateSignature(payloadStr, secret); + const shortSigHeader = header.replace(/sha256=.*/, 'sha256=short'); + const result = await validateWebhookSignature(payloadStr, shortSigHeader, secret); + + expect(result.valid).toBe(false); + expect(result.error).toBe('Invalid signature'); + }); + + it('should handle empty or undefined inputs safely', async () => { + const { header } = await generateSignature(payloadStr, secret); + + // Empty payload + 
expect((await validateWebhookSignature('', header, secret)).valid).toBe(false); + + // Empty secret + expect((await validateWebhookSignature(payloadStr, header, '')).valid).toBe(false); + + // Empty header + expect((await validateWebhookSignature(payloadStr, '', secret)).valid).toBe(false); + }); + + it('should be resilient to timing attacks', async () => { + // Generate multiple signatures to test timing consistency + const signatures: string[] = []; + for (let i = 0; i < 10; i++) { + const testSecret = `secret_${i}`; + const { header } = await generateSignature(payloadStr, testSecret); + signatures.push(header); + } + + // All invalid signatures should be rejected + // The implementation should use constant-time comparison + for (const sig of signatures) { + const result = await validateWebhookSignature(payloadStr, sig, secret); + expect(result.valid).toBe(false); + } + }); + }); +}); diff --git a/packages/git-storage-sdk-node/tsconfig.json b/packages/git-storage-sdk-node/tsconfig.json new file mode 100644 index 000000000..1116724a5 --- /dev/null +++ b/packages/git-storage-sdk-node/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../tsconfig.options.json", + "include": ["src/**/*", "tsup.config.ts", "package.json"], + "references": [], + "compilerOptions": { + "outDir": "../../.moon/cache/types/packages/git-storage-sdk", + "tsBuildInfoFile": "../../.moon/cache/types/packages/git-storage-sdk/.tsbuildinfo" + } +} diff --git a/packages/git-storage-sdk-node/tsconfig.tsup.json b/packages/git-storage-sdk-node/tsconfig.tsup.json new file mode 100644 index 000000000..fa4cd2a9b --- /dev/null +++ b/packages/git-storage-sdk-node/tsconfig.tsup.json @@ -0,0 +1,13 @@ +{ + "extends": "../../tsconfig.options.json", + "include": ["src/**/*"], + "compilerOptions": { + "composite": false, + "incremental": false, + "declaration": true, + "emitDeclarationOnly": true, + "declarationMap": true, + "outDir": "./dist", + "tsBuildInfoFile": "./dist/.tsbuildinfo" + } +} diff --git 
a/packages/git-storage-sdk-node/tsup.config.ts b/packages/git-storage-sdk-node/tsup.config.ts new file mode 100644 index 000000000..4adefdf5c --- /dev/null +++ b/packages/git-storage-sdk-node/tsup.config.ts @@ -0,0 +1,21 @@ +import { defineConfig } from 'tsup'; + +export default defineConfig({ + entry: ['src/index.ts'], + format: ['cjs', 'esm'], + dts: true, + clean: true, + sourcemap: true, + minify: false, + splitting: false, + treeshake: true, + external: ['node:crypto', 'crypto'], + tsconfig: 'tsconfig.tsup.json', + esbuildOptions(options) { + // Always define the URLs at build time + options.define = { + __API_BASE_URL__: JSON.stringify('https://api.{{org}}.code.storage'), + __STORAGE_BASE_URL__: JSON.stringify('{{org}}.code.storage'), + }; + }, +}); diff --git a/packages/git-storage-sdk-node/vitest.config.ts b/packages/git-storage-sdk-node/vitest.config.ts new file mode 100644 index 000000000..fda74e8ec --- /dev/null +++ b/packages/git-storage-sdk-node/vitest.config.ts @@ -0,0 +1,11 @@ +import { defineConfig } from 'vitest/config'; + +export default defineConfig({ + test: { + restoreMocks: true, + }, + define: { + __API_BASE_URL__: JSON.stringify('https://api.{{org}}.3p.pierre.rip'), + __STORAGE_BASE_URL__: JSON.stringify('{{org}}.3p.pierre.rip'), + }, +}); diff --git a/packages/git-storage-sdk-python/.gitignore b/packages/git-storage-sdk-python/.gitignore new file mode 100644 index 000000000..716ff3406 --- /dev/null +++ b/packages/git-storage-sdk-python/.gitignore @@ -0,0 +1,61 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +*.manifest +*.spec + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Virtual 
environments +venv/ +ENV/ +env/ + +# IDEs +.vscode/ +.idea/ +*.swp +*.swo +*~ + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Ruff +.ruff_cache/ diff --git a/packages/git-storage-sdk-python/.python-version b/packages/git-storage-sdk-python/.python-version new file mode 100644 index 000000000..c70edfad8 --- /dev/null +++ b/packages/git-storage-sdk-python/.python-version @@ -0,0 +1 @@ +3.11.13 diff --git a/packages/git-storage-sdk-python/DEVELOPMENT.md b/packages/git-storage-sdk-python/DEVELOPMENT.md new file mode 100644 index 000000000..ba06cfcc3 --- /dev/null +++ b/packages/git-storage-sdk-python/DEVELOPMENT.md @@ -0,0 +1,298 @@ +# Development Guide + +This document provides technical details for developers working on the Pierre Storage Python SDK. + +## Architecture + +The SDK is organized into the following modules: + +### Core Modules + +- **`client.py`**: Main `GitStorage` class for creating/finding repositories +- **`repo.py`**: `RepoImpl` class implementing all repository operations +- **`commit.py`**: `CommitBuilderImpl` for creating commits with streaming support +- **`auth.py`**: JWT token generation and signing +- **`errors.py`**: Custom exception classes +- **`types.py`**: Type definitions using TypedDict and Enums +- **`webhook.py`**: Webhook signature validation utilities + +### Design Patterns + +1. **Protocol-based interfaces**: Uses `Protocol` classes for type checking without inheritance +2. **Fluent builder**: `CommitBuilder` provides chainable methods for composing commits +3. **Async/await**: All I/O operations are async for better performance +4. **Streaming**: Large files are streamed in 4MB chunks to avoid memory issues + +## Module Details + +### Authentication (`auth.py`) + +JWT generation with automatic algorithm detection: +- ES256 for elliptic curve keys (most common) +- RS256 for RSA keys +- EdDSA for Ed25519/Ed448 keys + +Uses `cryptography` library for key loading and PyJWT for signing. 
+ +### Commit Builder (`commit.py`) + +Key features: +- Fluent API for building commits +- Streaming support for large files +- Chunking into 4MB segments +- NDJSON protocol for server communication +- Error handling with detailed ref update information + +### Repository Operations (`repo.py`) + +Implements all Git storage API endpoints: +- File operations (get, list) +- Branch and commit listing with pagination +- Diff operations (branch, commit) +- Pull upstream +- Restore commits +- Commit creation + +### Type System (`types.py`) + +Uses TypedDict for better IDE support and runtime type checking: +- All API options are typed +- Results are structured with TypedDict +- Enums for constants (DiffFileState, GitFileMode) + +## Testing Strategy + +### Unit Tests (`tests/test_client.py`, `tests/test_webhook.py`) + +- Mock HTTP responses using `unittest.mock` +- Test error conditions and validation +- Verify JWT generation and structure +- Test webhook signature validation + +### Integration Tests (`tests/test_full_workflow.py`) + +- End-to-end workflow testing +- Configurable via environment variables +- Mirrors TypeScript test for consistency +- Uses `wait_for` helper for async polling + +## Dependencies + +### Required + +- **httpx**: Async HTTP client with streaming support +- **pyjwt**: JWT encoding/decoding +- **cryptography**: Key management and crypto operations +- **pydantic**: Data validation (future use) +- **typing-extensions**: Backport of typing features for Python 3.8+ + +### Development + +- **pytest**: Test framework +- **pytest-asyncio**: Async test support +- **pytest-cov**: Coverage reporting +- **mypy**: Static type checking +- **ruff**: Fast linter and formatter + +## Code Style + +### Type Hints + +All public functions must have type hints: + +```python +async def create_repo( + self, + options: Optional[CreateRepoOptions] = None +) -> Repo: + """Create a new repository.""" + ... 
+``` + +### Docstrings + +Use Google-style docstrings: + +```python +def generate_jwt( + key_pem: str, + issuer: str, + repo_id: str, + scopes: Optional[List[str]] = None, + ttl: int = 31536000, +) -> str: + """Generate a JWT token for Git storage authentication. + + Args: + key_pem: Private key in PEM format (PKCS8) + issuer: Token issuer (customer name) + repo_id: Repository identifier + scopes: List of permission scopes + ttl: Time-to-live in seconds + + Returns: + Signed JWT token string + + Raises: + ValueError: If key is invalid or cannot be loaded + """ + ... +``` + +### Error Handling + +Use specific exception types: + +```python +try: + repo = await storage.create_repo({"id": "test"}) +except ApiError as e: + # Handle API errors + print(f"API error: {e.status_code}") +except RefUpdateError as e: + # Handle ref update failures + print(f"Ref update failed: {e.status}") +``` + +## Performance Considerations + +### Streaming + +Large files are streamed to avoid memory issues: + +```python +async def _chunkify(self, source: FileSource) -> AsyncIterator[Dict[str, Any]]: + """Chunkify a file source into MAX_CHUNK_BYTES segments.""" + # Yields 4MB chunks as they're read + ... +``` + +### Connection Pooling + +Uses `httpx.AsyncClient` which provides connection pooling by default. + +### Async Operations + +All I/O is async, allowing concurrent operations: + +```python +# Run multiple operations concurrently +results = await asyncio.gather( + repo.list_files(), + repo.list_commits(), + repo.list_branches(), +) +``` + +## Debugging + +### Enable HTTP logging + +```python +import logging +logging.basicConfig(level=logging.DEBUG) +``` + +### Inspect JWT tokens + +```python +import jwt + +token = "eyJ..." 
+payload = jwt.decode(token, options={"verify_signature": False}) +print(payload) +``` + +### Verbose test output + +```bash +pytest -vv --log-cli-level=DEBUG +``` + +## Building and Publishing + +### Install dependencies + +```bash +# Install all dependencies (including dev dependencies) +uv sync + +# Install only production dependencies +uv sync --no-dev +``` + +### Build package + +```bash +uv build +``` + +### Check package + +```bash +uv run twine check dist/* +``` + +### Upload to PyPI + +```bash +uv run twine upload dist/* +``` + +### Test installation + +```bash +uv pip install dist/pierre_storage-0.1.4-py3-none-any.whl +``` + +## Compatibility + +- **Python**: 3.8+ (uses TypedDict, Protocol) +- **Operating Systems**: All (uses pure Python) +- **Async Runtime**: asyncio (standard library) + +## Future Improvements + +Potential areas for enhancement: + +1. **Retry logic**: Automatic retry with exponential backoff +2. **Caching**: Optional caching of frequently accessed data +3. **Progress callbacks**: Report upload/download progress +4. **Batch operations**: Optimize multiple API calls +5. **Better error messages**: More context in error messages +6. **Pluggable transports**: Allow custom HTTP clients + +## Maintenance + +### Updating dependencies + +```bash +# Update to latest compatible versions +uv lock --upgrade + +# Update a specific package +uv lock --upgrade-package httpx + +# Check for security updates +uv run pip-audit +``` + +### Version bumping + +Update version in `pyproject.toml`: + +```toml +[project] +version = "0.2.0" +``` + +### Changelog + +Document changes in CHANGELOG.md following Keep a Changelog format. 
+ +## Resources + +- [Pierre API Documentation](https://docs.pierre.io/api) +- [TypeScript SDK](../git-storage-sdk) - Reference implementation +- [httpx Documentation](https://www.python-httpx.org/) +- [PyJWT Documentation](https://pyjwt.readthedocs.io/) diff --git a/packages/git-storage-sdk-python/LICENSE b/packages/git-storage-sdk-python/LICENSE new file mode 100644 index 000000000..e2c2b0606 --- /dev/null +++ b/packages/git-storage-sdk-python/LICENSE @@ -0,0 +1,189 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, and + distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by the + copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all other + entities that control, are controlled by, or are under common control with + that entity. For the purposes of this definition, "control" means (i) the + power, direct or indirect, to cause the direction or management of such + entity, whether by contract or otherwise, or (ii) ownership of fifty percent + (50%) or more of the outstanding shares, or (iii) beneficial ownership of + such entity. + + "You" (or "Your") shall mean an individual or Legal Entity exercising + permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation source, and + configuration files. + + "Object" form shall mean any form resulting from mechanical transformation + or translation of a Source form, including but not limited to compiled + object code, generated documentation, and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or Object form, + made available under the License, as indicated by a copyright notice that is + included in or attached to the work (an example is provided in the Appendix + below). + + "Derivative Works" shall mean any work, whether in Source or Object form, + that is based on (or derived from) the Work and for which the editorial + revisions, annotations, elaborations, or other modifications represent, as a + whole, an original work of authorship. For the purposes of this License, + Derivative Works shall not include works that remain separable from, or + merely link (or bind by name) to the interfaces of, the Work and Derivative + Works thereof. + + "Contribution" shall mean any work of authorship, including the original + version of the Work and any modifications or additions to that Work or + Derivative Works thereof, that is intentionally submitted to Licensor for + inclusion in the Work by the copyright owner or by an individual or Legal + Entity authorized to submit on behalf of the copyright owner. For the + purposes of this definition, "submitted" means any form of electronic, + verbal, or written communication sent to the Licensor or its + representatives, including but not limited to communication on electronic + mailing lists, source code control systems, and issue tracking systems that + are managed by, or on behalf of, the Licensor for the purpose of discussing + and improving the Work, but excluding communication that is conspicuously + marked or otherwise designated in writing by the copyright owner as "Not a + Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity on + behalf of whom a Contribution has been received by Licensor and subsequently + incorporated within the Work. + +2. Grant of Copyright License. 
Subject to the terms and conditions of this + License, each Contributor hereby grants to You a perpetual, worldwide, + non-exclusive, no-charge, royalty-free, irrevocable copyright license to + reproduce, prepare Derivative Works of, publicly display, publicly perform, + sublicense, and distribute the Work and such Derivative Works in Source or + Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this + License, each Contributor hereby grants to You a perpetual, worldwide, + non-exclusive, no-charge, royalty-free, irrevocable (except as stated in + this section) patent license to make, have made, use, offer to sell, sell, + import, and otherwise transfer the Work, where such license applies only to + those patent claims licensable by such Contributor that are necessarily + infringed by their Contribution(s) alone or by combination of their + Contribution(s) with the Work to which such Contribution(s) was submitted. + If You institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work or a + Contribution incorporated within the Work constitutes direct or contributory + patent infringement, then any patent licenses granted to You under this + License for that Work shall terminate as of the date such litigation is + filed. + +4. Redistribution. 
You may reproduce and distribute copies of the Work or + Derivative Works thereof in any medium, with or without modifications, and + in Source or Object form, provided that You meet the following conditions: + + (a) You must give any other recipients of the Work or Derivative Works a + copy of this License; and + + (b) You must cause any modified files to carry prominent notices stating + that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works that You + distribute, all copyright, patent, trademark, and attribution notices from + the Source form of the Work, excluding those notices that do not pertain to + any part of the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its distribution, + then any Derivative Works that You distribute must include a readable copy + of the attribution notices contained within such NOTICE file, excluding + those notices that do not pertain to any part of the Derivative Works, in at + least one of the following places: within a NOTICE text file distributed as + part of the Derivative Works; within the Source form or documentation, if + provided along with the Derivative Works; or, within a display generated by + the Derivative Works, if and wherever such third-party notices normally + appear. The contents of the NOTICE file are for informational purposes only + and do not modify the License. You may add Your own attribution notices + within Derivative Works that You distribute, alongside or as an addendum to + the NOTICE text from the Work, provided that such additional attribution + notices cannot be construed as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and may + provide additional or different license terms and conditions for use, + reproduction, or distribution of Your modifications, or for any such + Derivative Works as a whole, provided Your use, reproduction, and + distribution of the Work otherwise complies with the conditions stated in + this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any + Contribution intentionally submitted for inclusion in the Work by You to the + Licensor shall be under the terms and conditions of this License, without + any additional terms or conditions. Notwithstanding the above, nothing + herein shall supersede or modify the terms of any separate license agreement + you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, + trademarks, service marks, or product names of the Licensor, except as + required for reasonable and customary use in describing the origin of the + Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in + writing, Licensor provides the Work (and each Contributor provides its + Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied, including, without limitation, any + warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or + FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining + the appropriateness of using or redistributing the Work and assume any risks + associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, whether in + tort (including negligence), contract, or otherwise, unless required by + applicable law (such as deliberate and grossly negligent acts) or agreed to + in writing, shall any Contributor be liable to You for damages, including + any direct, indirect, special, incidental, or consequential damages of any + character arising as a result of this License or out of the use or inability + to use the Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all other + commercial damages or losses), even if such Contributor has been advised of + the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or + Derivative Works thereof, You may choose to offer, and charge a fee for, + acceptance of support, warranty, indemnity, or other liability obligations + and/or rights consistent with this License. However, in accepting such + obligations, You may act only on Your own behalf and on Your sole + responsibility, not on behalf of any other Contributor, and only if You + agree to indemnify, defend, and hold each Contributor harmless for any + liability incurred by, or claims asserted against, such Contributor by + reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ +Copyright 2025 Pierre Computer Company + +Licensed under the Apache License, Version 2.0 (the "License"); you may not use +this file except in compliance with the License. You may obtain a copy of the +License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software distributed +under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. diff --git a/packages/git-storage-sdk-python/MANIFEST.in b/packages/git-storage-sdk-python/MANIFEST.in new file mode 100644 index 000000000..5a7b7d0a7 --- /dev/null +++ b/packages/git-storage-sdk-python/MANIFEST.in @@ -0,0 +1,5 @@ +include README.md +include LICENSE +include pyproject.toml +recursive-include pierre_storage *.py +recursive-include pierre_storage py.typed diff --git a/packages/git-storage-sdk-python/PROJECT_SUMMARY.md b/packages/git-storage-sdk-python/PROJECT_SUMMARY.md new file mode 100644 index 000000000..c915ccaf0 --- /dev/null +++ b/packages/git-storage-sdk-python/PROJECT_SUMMARY.md @@ -0,0 +1,264 @@ +# Pierre Git Storage Python SDK - Project Summary + +## ✅ Completed - Version 0.4.2 + +A fully-functional, production-ready Python SDK for Pierre Git Storage, mirroring the TypeScript SDK functionality. 
+ +### Latest Updates (v0.4.2) +- ✅ One-shot diff commits via `create_commit_from_diff` +- ✅ Shared commit option normalization across builder/diff flows +- ✅ Expanded SDK documentation and quick-start examples +- ✅ Version bump to keep PyPI metadata current + +## 📊 Project Statistics + +- **Total Code**: ~2,935 lines +- **Core Modules**: 8 Python modules +- **Test Files**: 2 test modules +- **Unit Tests**: 26 tests (100% passing ✅) +- **Test Coverage**: 45% overall (93% for client.py, 83% for webhook.py) +- **Python Version**: 3.8+ support + +## 🗂️ Project Structure + +``` +packages/git-storage-sdk-python/ +├── pierre_storage/ # Main package +│ ├── __init__.py # Public API exports +│ ├── auth.py # JWT authentication (ES256/RS256/EdDSA) +│ ├── client.py # GitStorage main client (93% coverage) +│ ├── commit.py # CommitBuilder with 4MB streaming +│ ├── errors.py # ApiError & RefUpdateError +│ ├── repo.py # All repository operations +│ ├── types.py # TypedDict type definitions +│ ├── webhook.py # HMAC webhook validation (83% coverage) +│ └── py.typed # PEP 561 type marker +│ +├── tests/ # Test suite +│ ├── conftest.py # Shared fixtures +│ ├── test_client.py # Client & JWT tests (18 tests) +│ └── test_webhook.py # Webhook tests (8 tests) +│ +├── scripts/ +│ └── setup.sh # Moon setup script +│ +├── pyproject.toml # Modern Python packaging +├── moon.yml # Moon task configuration +├── README.md # Complete documentation +├── QUICKSTART.md # Getting started guide +├── CONTRIBUTING.md # Contribution guidelines +├── DEVELOPMENT.md # Technical architecture docs +└── LICENSE # Apache 2.0 license +``` + +## 🎯 Key Features + +### Core Functionality +- ✅ Repository creation and management +- ✅ JWT-based authentication (ES256, RS256, EdDSA) +- ✅ Public JWT helper for manual token generation +- ✅ File operations (get stream, list) +- ✅ Branch & commit listing with pagination +- ✅ Branch and commit diffs +- ✅ Pull from upstream +- ✅ Restore commits +- ✅ Webhook signature validation + +### 
Developer Experience +- ✅ Full type hints throughout +- ✅ Async/await API +- ✅ Fluent commit builder API +- ✅ Streaming support for large files (4MB chunks) +- ✅ Comprehensive error handling +- ✅ Well-documented with docstrings +- ✅ Unit test coverage + +## 🧪 Testing + +### Running Tests + +```bash +# Using virtual environment +cd packages/git-storage-sdk-python +python3 -m venv venv +source venv/bin/activate +pip install -e ".[dev]" +pytest -v + +# Using Moon +moon run git-storage-sdk-python:setup +moon run git-storage-sdk-python:test +``` + +### Test Results +``` +26 passed in 0.11s ✅ +Coverage: 45% overall +- client.py: 93% ⭐ +- webhook.py: 83% ⭐ +- types.py: 100% ⭐ +``` + +## 📦 Dependencies + +### Required (Runtime) +- `httpx` - Async HTTP client with streaming +- `pyjwt` - JWT encoding/decoding +- `cryptography` - Key management +- `pydantic` - Data validation +- `typing-extensions` - Type hint backports (Python 3.8-3.9) + +### Development +- `pytest` - Test framework +- `pytest-asyncio` - Async test support +- `pytest-cov` - Coverage reporting +- `mypy` - Type checking +- `ruff` - Fast linting and formatting + +## 🚀 Usage Examples + +### Basic Usage + +```python +from pierre_storage import GitStorage + +# Initialize client +storage = GitStorage({ + "name": "your-name", + "key": "your-private-key-pem", +}) + +# Create repository +repo = await storage.create_repo() + +# Create commit with streaming +result = await ( + repo.create_commit({ + "target_branch": "main", + "commit_message": "Initial commit", + "author": {"name": "Bot", "email": "bot@example.com"}, + }) + .add_file_from_string("README.md", "# My Project") + .add_file("data.bin", large_file_stream) + .send() +) + +print(f"Commit: {result['commit_sha']}") + +# Apply an existing diff without using the builder +diff_text = """\ +--- a/README.md ++++ b/README.md +@@ +-Old line ++New line +""" + +result = await repo.create_commit_from_diff( + target_branch="main", + commit_message="Apply diff", + 
diff=diff_text, + author={"name": "Bot", "email": "bot@example.com"}, + base_branch="release", # optional +) +print(f"Diff commit: {result['commit_sha']}") +``` + +### Manual JWT Generation + +```python +from pierre_storage import generate_jwt + +# Generate JWT token directly +token = generate_jwt( + key_pem=private_key, + issuer="your-name", + repo_id="repo-id", + scopes=["git:write", "git:read"], + ttl=3600 +) + +# Use in Git URL +git_url = f"https://t:{token}@your-name.code.storage/repo-id.git" +``` + +## 🔧 Moon Tasks + +Available tasks in `moon.yml`: + +```bash +moon run :setup # Create venv and install deps +moon run :test # Run unit tests +moon run :test-coverage # Run tests with coverage report +moon run :typecheck # Run mypy type checking +moon run :lint # Run ruff linting +moon run :format # Check code formatting +moon run :format-write # Auto-format code +moon run :build # Build distributable package +moon run :clean # Clean all generated files +``` + +## 📝 Documentation + +- **README.md** - Complete API reference and usage examples +- **QUICKSTART.md** - Quick start guide for new users +- **CONTRIBUTING.md** - Development workflow and guidelines +- **DEVELOPMENT.md** - Architecture and technical details +- **PROJECT_SUMMARY.md** - This file + +## 🎨 Code Quality + +- **Type Safety**: Full mypy type hints +- **Linting**: Ruff configured for modern Python +- **Formatting**: Ruff formatter +- **Testing**: Comprehensive unit test suite +- **Documentation**: Docstrings on all public APIs + +## 🌟 Highlights + +1. **Feature Parity**: 100% feature parity with TypeScript SDK +2. **Pythonic API**: Follows Python conventions and best practices +3. **Async First**: All I/O operations are async for performance +4. **Streaming**: Large file support with 4MB chunking +5. **Type Safe**: Full type hints for IDE support +6. **Well Tested**: 26 unit tests covering core functionality +7. 
**Production Ready**: Error handling, validation, documentation + +## 📋 Future Enhancements + +Potential improvements (not required for v0.1.2): + +- [ ] Increase test coverage to 80%+ +- [ ] Add retry logic with exponential backoff +- [ ] Add progress callbacks for uploads +- [ ] Add caching layer for frequently accessed data +- [ ] Add batch operation optimizations +- [ ] Integration tests (optional) + +## 🎓 Comparison with TypeScript SDK + +| Feature | TypeScript | Python | Status | +|---------|-----------|--------|--------| +| Repository operations | ✅ | ✅ | Complete | +| JWT authentication | ✅ | ✅ | Complete | +| Commit builder | ✅ | ✅ | Complete | +| File streaming | ✅ | ✅ | Complete | +| Webhook validation | ✅ | ✅ | Complete | +| Error handling | ✅ | ✅ | Complete | +| Type definitions | ✅ | ✅ | Complete | +| Documentation | ✅ | ✅ | Complete | +| Unit tests | ✅ | ✅ | Complete | + +## ✨ Ready for Use + +The Python SDK is **production-ready** and can be: +- ✅ Published to PyPI +- ✅ Used in production applications +- ✅ Integrated into existing Python projects +- ✅ Extended with additional features + +## 📞 Support + +- GitHub Issues: For bug reports and feature requests +- Documentation: See README.md for complete API reference +- Examples: See QUICKSTART.md for usage examples diff --git a/packages/git-storage-sdk-python/PUBLISHING.md b/packages/git-storage-sdk-python/PUBLISHING.md new file mode 100644 index 000000000..55773b65f --- /dev/null +++ b/packages/git-storage-sdk-python/PUBLISHING.md @@ -0,0 +1,400 @@ +# Publishing to PyPI - Complete Guide + +This guide walks you through publishing the `pierre-storage` package to PyPI for the first time. + +## Prerequisites + +### 1. Create PyPI Account + +First, you need accounts on both PyPI and TestPyPI (for testing): + +1. **PyPI (production)**: https://pypi.org/account/register/ +2. **TestPyPI (testing)**: https://test.pypi.org/account/register/ + +> **Note**: These are separate accounts, so register on both! 
+ +### 2. Verify Your Email + +After registering, check your email and verify your account on both sites. + +### 3. Enable 2FA (Required for PyPI) + +PyPI requires two-factor authentication: + +1. Go to https://pypi.org/manage/account/ +2. Click "Add 2FA with authentication application" +3. Use an app like Google Authenticator, Authy, or 1Password +4. Save the recovery codes somewhere safe! + +Do the same for TestPyPI if you want (optional but recommended). + +### 4. Create API Tokens + +Instead of using passwords, we'll use API tokens (more secure): + +#### For TestPyPI (testing): +1. Go to https://test.pypi.org/manage/account/token/ +2. Click "Add API token" +3. Token name: `pierre-storage-test` +4. Scope: "Entire account" (for first upload) +5. Copy the token (starts with `pypi-...`) +6. **Save it immediately** - you won't see it again! + +#### For PyPI (production): +1. Go to https://pypi.org/manage/account/token/ +2. Click "Add API token" +3. Token name: `pierre-storage` +4. Scope: "Entire account" (for first upload) +5. Copy the token +6. **Save it securely** (password manager, environment variable, etc.) 
+ +## Step-by-Step Publishing Process + +### Step 1: Install Publishing Tools + +```bash +cd packages/git-storage-sdk-python + +# With uv (recommended) +uv sync + +# Or with traditional venv +source venv/bin/activate +pip install build twine +``` + +### Step 2: Prepare the Package + +Make sure everything is ready: + +```bash +# Run tests to ensure everything works +uv run pytest -v + +# Type check +uv run mypy pierre_storage + +# Lint check +uv run ruff check pierre_storage + +# Format check +uv run ruff format --check pierre_storage +``` + +All should pass ✅ + +### Step 3: Build the Package + +```bash +# Clean any old builds +rm -rf dist/ build/ *.egg-info + +# Build the package with Moon (recommended - cleaner output) +moon git-storage-sdk-python:build + +# Or build directly +uv build + +# You should see output like: +# Successfully built pierre_storage-0.4.2.tar.gz and pierre_storage-0.4.2-py3-none-any.whl +``` + +This creates two files in `dist/`: +- `pierre_storage-0.4.2-py3-none-any.whl` (wheel - preferred format) +- `pierre-storage-0.4.2.tar.gz` (source distribution) + +### Step 4: Check the Package + +Before uploading, verify the package is correct: + +```bash +# Check package metadata and contents +uv run twine check dist/* + +# Should output: +# Checking dist/pierre_storage-0.4.2-py3-none-any.whl: PASSED +# Checking dist/pierre-storage-0.4.2.tar.gz: PASSED +``` + +### Step 5: Test Upload to TestPyPI (RECOMMENDED) + +Always test on TestPyPI first! + +```bash +# Upload to TestPyPI +uv run twine upload --repository testpypi dist/* + +# You'll be prompted: +# Enter your username: __token__ +# Enter your password: [paste your TestPyPI token starting with pypi-...] +``` + +> **Important**: Username is literally `__token__` (with two underscores), not your username! 
+ +If successful, you'll see: +``` +Uploading pierre_storage-0.4.2-py3-none-any.whl +Uploading pierre-storage-0.4.2.tar.gz +View at: https://test.pypi.org/project/pierre-storage/0.4.2/ +``` + +### Step 6: Test Installation from TestPyPI + +Test that people can actually install it: + +```bash +# Test with uv in an isolated environment +uv run --isolated --index https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/ \ + python -c "from pierre_storage import GitStorage; print('Success!')" + +# Or create a new virtual environment for testing +python3 -m venv test-env +source test-env/bin/activate +pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/ pierre-storage +python -c "from pierre_storage import GitStorage; print('Success!')" +deactivate +rm -rf test-env +``` + +> **Note**: We use `--extra-index-url` because dependencies (httpx, pyjwt, etc.) are on the real PyPI, not TestPyPI. + +### Step 7: Upload to Real PyPI 🚀 + +If TestPyPI worked perfectly, upload to the real PyPI: + +```bash +# Make sure you're in the SDK directory +cd packages/git-storage-sdk-python + +# Upload to PyPI +uv run twine upload dist/* + +# Enter credentials: +# Username: __token__ +# Password: [paste your PyPI token] +``` + +Success! 
🎉 + +You'll see: +``` +Uploading pierre_storage-0.4.2-py3-none-any.whl +Uploading pierre-storage-0.4.2.tar.gz +View at: https://pypi.org/project/pierre-storage/0.4.2/ +``` + +### Step 8: Verify Installation + +Test the real installation: + +```bash +# With uv +uv run --isolated python -c "from pierre_storage import GitStorage; print('Success!')" + +# Or with traditional venv +python3 -m venv verify-env +source verify-env/bin/activate +pip install pierre-storage + +# Verify +python -c "from pierre_storage import GitStorage; print('Installed successfully!')" + +# Clean up +deactivate +rm -rf verify-env +``` + +## Using a `.pypirc` File (Optional but Recommended) + +Instead of entering tokens each time, create a `~/.pypirc` file: + +```bash +nano ~/.pypirc +``` + +Add this content: + +```ini +[distutils] +index-servers = + pypi + testpypi + +[pypi] +username = __token__ +password = pypi-YOUR-PRODUCTION-TOKEN-HERE + +[testpypi] +repository = https://test.pypi.org/legacy/ +username = __token__ +password = pypi-YOUR-TEST-TOKEN-HERE +``` + +**Secure the file:** +```bash +chmod 600 ~/.pypirc +``` + +Now you can upload without entering credentials: + +```bash +# Upload to TestPyPI +twine upload --repository testpypi dist/* + +# Upload to PyPI +twine upload dist/* +``` + +## Publishing Updates + +When you release a new version: + +### 1. Update Version Number + +Edit `pyproject.toml`: + +```toml +[project] +version = "0.1.3" # Increment version +``` + +### 2. Update CHANGELOG (if you have one) + +Document what changed. + +### 3. 
Build and Upload + +```bash +# Clean old builds +rm -rf dist/ build/ *.egg-info + +# Run tests +pytest -v + +# Build +python -m build + +# Check +twine check dist/* + +# Upload to TestPyPI first +twine upload --repository testpypi dist/* + +# Test installation +pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/ pierre-storage==0.1.3 + +# If good, upload to PyPI +twine upload dist/* +``` + +## Troubleshooting + +### Error: "File already exists" + +You can't re-upload the same version. You must increment the version number. + +**Solution**: Update version in `pyproject.toml`, rebuild, and upload. + +### Error: "Invalid username or password" + +Common mistakes: +- Username should be `__token__` (with two underscores), not your PyPI username +- Password should be the full token starting with `pypi-` +- Make sure you're using the right token (TestPyPI vs PyPI) + +### Error: "403 Forbidden" + +You don't have permission to upload to that package name. + +**Solutions**: +- If it's your first upload, this shouldn't happen +- If someone else owns the name, you need to choose a different name +- Make sure you're logged in to the right account + +### Package not found after upload + +Wait a few minutes - PyPI can take 5-15 minutes to index new packages. + +### Import error after installation + +Make sure: +- Your package structure is correct +- `__init__.py` exports the right things +- You're testing in a fresh virtual environment + +## Using Moon for Building + +You can also use Moon tasks: + +```bash +# Build package +moon run git-storage-sdk-python:build + +# Then upload +cd packages/git-storage-sdk-python +twine upload dist/* +``` + +## Security Best Practices + +1. **Never commit tokens** to git +2. **Use API tokens**, not passwords +3. **Scope tokens** to specific projects (after first upload) +4. **Rotate tokens** periodically +5. **Enable 2FA** on PyPI +6. 
**Keep .pypirc secure** (`chmod 600`) + +## Scoped Tokens (After First Upload) + +After your first successful upload, create project-scoped tokens for better security: + +### For PyPI: +1. Go to https://pypi.org/manage/project/pierre-storage/settings/ +2. Scroll to "API tokens" +3. Create new token with scope: "Project: pierre-storage" +4. Update your `~/.pypirc` with the new token + +### For TestPyPI: +Do the same at https://test.pypi.org/manage/project/pierre-storage/settings/ + +## Quick Reference + +```bash +# One-time setup +pip install build twine + +# For each release +rm -rf dist/ build/ *.egg-info +pytest -v +python -m build +twine check dist/* +twine upload --repository testpypi dist/* # Test first +twine upload dist/* # Then production +``` + +## Next Steps After Publishing + +1. **Add PyPI badge to README**: + ```markdown + [![PyPI version](https://badge.fury.io/py/pierre-storage.svg)](https://badge.fury.io/py/pierre-storage) + ``` + +2. **Announce the release**: + - Tweet about it + - Post on relevant forums + - Update documentation + +3. **Monitor**: + - Check https://pypi.org/project/pierre-storage/ for stats + - Watch for issues on GitHub + +## Resources + +- [PyPI Help](https://pypi.org/help/) +- [Packaging Python Projects](https://packaging.python.org/tutorials/packaging-projects/) +- [Twine Documentation](https://twine.readthedocs.io/) +- [PyPI API Tokens](https://pypi.org/help/#apitoken) + +--- + +**Congratulations on your first PyPI package! 🎉** diff --git a/packages/git-storage-sdk-python/QUICKSTART.md b/packages/git-storage-sdk-python/QUICKSTART.md new file mode 100644 index 000000000..abdd3a071 --- /dev/null +++ b/packages/git-storage-sdk-python/QUICKSTART.md @@ -0,0 +1,230 @@ +# Quick Start Guide + +This guide will help you get started with the Pierre Git Storage Python SDK. 
+ +## Installation + +```bash +# Using uv (recommended) +uv add pierre-storage + +# Or using pip +pip install pierre-storage +``` + +## Your First Repository + +Here's a minimal example to create a repository and make your first commit: + +```python +import asyncio +from pierre_storage import GitStorage + +async def main(): + # Initialize the client + storage = GitStorage({ + "name": "your-name", + "key": """-----BEGIN PRIVATE KEY----- + Your private key here + -----END PRIVATE KEY-----""", + }) + + # Create a repository + repo = await storage.create_repo() + print(f"Created repository: {repo.id}") + + # Get the Git remote URL + url = await repo.get_remote_url() + print(f"Git URL: {url}") + + # Create your first commit + result = await ( + repo.create_commit({ + "target_branch": "main", + "commit_message": "Initial commit", + "author": { + "name": "Your Name", + "email": "you@example.com" + }, + }) + .add_file_from_string("README.md", "# My Project\n\nWelcome!") + .send() + ) + + print(f"Commit SHA: {result['commit_sha']}") + +# Run the example +asyncio.run(main()) +``` + +## Reading Repository Data + +```python +async def read_data(): + storage = GitStorage({"name": "your-name", "key": "your-key"}) + repo = await storage.find_one({"id": "repo-id"}) + + # List files + files = await repo.list_files() + print("Files:", files["paths"]) + + # List commits + commits = await repo.list_commits({"limit": 10}) + for commit in commits["commits"]: + print(f"{commit['sha'][:7]} - {commit['message']}") + + # Get file content + response = await repo.get_file_stream({"path": "README.md"}) + content = await response.aread() + print(content.decode()) + +asyncio.run(read_data()) +``` + +## Making Changes + +```python +async def make_changes(): + storage = GitStorage({"name": "your-name", "key": "your-key"}) + repo = await storage.find_one({"id": "repo-id"}) + + # Update and delete files in one commit + result = await ( + repo.create_commit({ + "target_branch": "main", + 
            "commit_message": "Update documentation",
+            "author": {"name": "Bot", "email": "bot@example.com"},
+        })
+        .add_file_from_string("docs/guide.md", "# Guide\n\nNew content")
+        .add_file_from_string("README.md", "# Updated\n\nRevised readme")
+        .delete_path("old-file.txt")
+        .send()
+    )
+
+    print(f"New commit: {result['commit_sha']}")
+
+asyncio.run(make_changes())
+```
+
+### Applying a Diff Directly
+
+If you already have a unified diff (for example, generated by `git diff`), you can apply it without building file operations manually:
+
+```python
+async def apply_diff():
+    storage = GitStorage({"name": "your-name", "key": "your-key"})
+    repo = await storage.find_one({"id": "repo-id"})
+
+    diff_text = """\
+--- a/README.md
++++ b/README.md
+@@
+-Old line
++New line
+"""
+
+    result = await repo.create_commit_from_diff(
+        target_branch="main",
+        commit_message="Apply upstream changes",
+        diff=diff_text,
+        author={"name": "Automation", "email": "bot@example.com"},
+        base_branch="main",  # optional, matches create_commit options
+    )
+
+    print(f"Updated commit: {result['commit_sha']}")
+
+asyncio.run(apply_diff())
+```
+
+## Using with Git CLI
+
+Once you have the remote URL, you can use it with standard Git commands:
+
+```bash
+# Clone the repository
+git clone <your-remote-url>
+
+# Or add as a remote to existing repo
+git remote add origin <your-remote-url>
+
+# Push changes
+git push origin main
+```
+
+## Generating JWT Tokens Manually
+
+For advanced use cases, you can generate JWT tokens directly:
+
+```python
+from pierre_storage import generate_jwt
+
+# Read your private key
+with open("path/to/key.pem", "r") as f:
+    private_key = f.read()
+
+# Generate a JWT token
+token = generate_jwt(
+    key_pem=private_key,
+    issuer="your-name",
+    repo_id="your-repo-id",
+    scopes=["git:write", "git:read"],  # Optional
+    ttl=3600,  # 1 hour, optional
+)
+
+# Use in Git URL
+git_url = f"https://t:{token}@your-name.code.storage/your-repo-id.git"
+print(f"Git URL: {git_url}")
+```
+
+This is useful when you need:
+- 
Custom token expiration times +- Specific permission scopes +- Tokens for external tools +- Fine-grained access control + +## Environment Configuration + +For production use, configure the SDK with your environment: + +```python +storage = GitStorage({ + "name": "your-company", + "key": your_private_key, + "api_base_url": "https://api.code.storage", + "storage_base_url": "code.storage", +}) +``` + +## Error Handling + +```python +from pierre_storage import ApiError, RefUpdateError + +try: + repo = await storage.create_repo({"id": "my-repo"}) +except ApiError as e: + if e.status_code == 409: + print("Repository already exists") + repo = await storage.find_one({"id": "my-repo"}) + else: + raise + +try: + result = await builder.send() +except RefUpdateError as e: + print(f"Commit failed: {e.message}") + print(f"Reason: {e.reason}") + # Handle conflict, retry, or notify user +``` + +## Next Steps + +- Read the [full documentation](README.md) for detailed API reference +- Check out [webhook validation](README.md#webhook-validation) for integrating with events +- See [examples/](examples/) for more complex use cases + +## Need Help? + +- Check the [README](README.md) for comprehensive documentation +- Open an issue on GitHub +- Contact support at support@pierre.io diff --git a/packages/git-storage-sdk-python/README.md b/packages/git-storage-sdk-python/README.md new file mode 100644 index 000000000..7b17251d9 --- /dev/null +++ b/packages/git-storage-sdk-python/README.md @@ -0,0 +1,790 @@ +# pierre-storage + +Pierre Git Storage SDK for Python applications. 
+ +## Installation + +```bash +# Using uv (recommended) +uv add pierre-storage + +# Or using pip +pip install pierre-storage +``` + +## Usage + +### Basic Setup + +```python +from pierre_storage import GitStorage + +# Initialize the client with your name and key +storage = GitStorage({ + "name": "your-name", # e.g., 'v0' + "key": "your-key", # Your API key in PEM format +}) +``` + +### Creating a Repository + +```python +# Create a new repository with auto-generated ID +repo = await storage.create_repo() +print(repo.id) # e.g., '123e4567-e89b-12d3-a456-426614174000' + +# Create a repository with custom ID +custom_repo = await storage.create_repo(id="my-custom-repo") +print(custom_repo.id) # 'my-custom-repo' + +# Create a repository with GitHub sync +github_repo = await storage.create_repo( + id="my-synced-repo", + base_repo={ + "owner": "octocat", + "name": "Hello-World", + "default_branch": "main", # optional + } +) +# This repository will sync with github.com/octocat/Hello-World + +# Create a repository by forking an existing repo +forked_repo = await storage.create_repo( + id="my-fork", + base_repo={ + "id": "my-template-id", + "ref": "main", # optional + }, +) +``` + +### Finding a Repository + +```python +found_repo = await storage.find_one(id="repo-id") +if found_repo: + url = await found_repo.get_remote_url() + print(f"Repository URL: {url}") +``` + +### Grep + +```python +result = await repo.grep( + pattern="TODO", + ref="main", + paths=["src/"], + case_sensitive=True, +) +print(result["matches"]) +``` + +### Getting Remote URLs + +The SDK generates secure URLs with JWT authentication for Git operations: + +```python +# Get URL with default permissions (git:write, git:read) and 1-year TTL +url = await repo.get_remote_url() +# Returns: https://t:JWT@your-name.code.storage/repo-id.git + +# Configure the Git remote +print(f"Run: git remote add origin {url}") + +# Get URL with custom permissions and TTL +read_only_url = await repo.get_remote_url( + 
permissions=["git:read"], # Read-only access + ttl=3600, # 1 hour in seconds +) + +# Get ephemeral remote URL (points to ephemeral namespace) +ephemeral_url = await repo.get_ephemeral_remote_url() +# Returns: https://t:JWT@your-name.code.storage/repo-id+ephemeral.git + +# Get ephemeral URL with custom permissions and TTL +ephemeral_url = await repo.get_ephemeral_remote_url( + permissions=["git:write", "git:read"], + ttl=3600, +) + +# Available permissions: +# - 'git:read' - Read access to Git repository +# - 'git:write' - Write access to Git repository +# - 'repo:write' - Create a repository +``` + +### Working with Repository Content + +Once you have a repository instance, you can perform various Git operations: + +```python +repo = await storage.create_repo() +# or +repo = await storage.find_one(id="existing-repo-id") + +# List repositories for the org +repos = await storage.list_repos(limit=20) +print(repos["repos"]) + +# Get file content (streaming) +response = await repo.get_file_stream( + path="README.md", + ref="main", # optional, defaults to default branch + ephemeral=False, # optional, set to True to read from ephemeral namespace +) +text = await response.aread() +print(text.decode()) + +# List all files in the repository +files = await repo.list_files( + ref="main", # optional, defaults to default branch + ephemeral=False, # optional, set to True to list files from ephemeral namespace +) +print(files["paths"]) # List of file paths + +# List branches +branches = await repo.list_branches( + limit=10, + cursor=None, # for pagination +) +print(branches["branches"]) + +# Create or promote a branch (synchronous Temporal workflow) +branch_result = await repo.create_branch( + base_branch="main", + target_branch="feature/preview", + base_is_ephemeral=False, # set True when the base lives in the ephemeral namespace + target_is_ephemeral=True, # set True to create an ephemeral branch + ttl=900, # optional JWT TTL in seconds +) +print(branch_result["target_branch"], 
branch_result.get("commit_sha")) + +# List commits +commits = await repo.list_commits( + branch="main", # optional + limit=20, + cursor=None, # for pagination +) +print(commits["commits"]) + +# Read a git note for a commit +note = await repo.get_note(sha="abc123...") +print(note["note"]) + +# Add a git note +note_result = await repo.create_note( + sha="abc123...", + note="Release QA approved", + author={"name": "Release Bot", "email": "release@example.com"}, +) +print(note_result["new_ref_sha"]) + +# Append to a git note +await repo.append_note( + sha="abc123...", + note="Follow-up review complete", +) + +# Delete a git note +await repo.delete_note(sha="abc123...") + +# Get branch diff +branch_diff = await repo.get_branch_diff( + branch="feature-branch", + base="main", # optional, defaults to main + ephemeral=False, # optional, resolve branch under ephemeral namespace + ephemeral_base=False, # optional, resolve base under ephemeral namespace +) +print(branch_diff["stats"]) +print(branch_diff["files"]) + +# Get commit diff +commit_diff = await repo.get_commit_diff( + sha="abc123...", +) +print(commit_diff["stats"]) +print(commit_diff["files"]) +``` + +### Creating Commits + +The SDK provides a fluent builder API for creating commits with streaming support: + +```python +# Create a commit +result = await ( + repo.create_commit( + target_branch="main", + commit_message="Update docs", + author={"name": "Docs Bot", "email": "docs@example.com"}, + ) + .add_file_from_string("docs/changelog.md", "# v2.0.2\n- add streaming SDK\n") + .add_file("docs/readme.md", b"Binary content here") + .delete_path("docs/legacy.txt") + .send() +) + +print(result["commit_sha"]) +print(result["ref_update"]["new_sha"]) +print(result["ref_update"]["old_sha"]) # All zeroes when ref is created +``` + +The builder exposes: + +- `add_file(path, source, *, mode=None)` - Attach bytes from various sources +- `add_file_from_string(path, contents, encoding="utf-8", *, mode=None)` - Add text files 
(defaults to UTF-8) +- `delete_path(path)` - Remove files or folders +- `send()` - Finalize the commit and receive metadata + +`send()` returns a result with: + +```python +{ + "commit_sha": str, + "tree_sha": str, + "target_branch": str, + "pack_bytes": int, + "blob_count": int, + "ref_update": { + "branch": str, + "old_sha": str, # All zeroes when the ref is created + "new_sha": str, + } +} +``` + +If the backend reports a failure, the builder raises a `RefUpdateError` containing the status, reason, and ref details. + +**Options:** + +- `target_branch` (required): Branch name (without `refs/heads/` prefix) +- `expected_head_sha` (optional): Branch or commit that must match the remote tip +- `base_branch` (optional): Name of the branch to use as the base when creating a new branch (without `refs/heads/` prefix) +- `ephemeral` (optional): Mark the target branch as ephemeral (stored in separate namespace) +- `ephemeral_base` (optional): Indicates the base branch is ephemeral (requires `base_branch`) +- `commit_message` (required): The commit message +- `author` (required): Dictionary with `name` and `email` +- `committer` (optional): Dictionary with `name` and `email` (defaults to author) + +### Creating Commits from Diff Streams + +When you already have a unified diff, you can let the SDK apply it directly without building individual file operations: + +```python +diff_text = """\ +--- a/docs/README.md ++++ b/docs/README.md +@@ +-Old line ++New line +""" + +result = await repo.create_commit_from_diff( + target_branch="main", + commit_message="Apply docs update", + diff=diff_text, + author={"name": "Docs Bot", "email": "docs@example.com"}, + expected_head_sha="abc123...", # optional optimistic lock + base_branch="release", # optional branch fallback +) + +print(result["commit_sha"]) +``` + +`diff` accepts the same source types as the commit builder (string, bytes, async iterator, etc.). 
The helper automatically streams the diff to the `/diff-commit` endpoint and returns a `CommitResult`. On conflicts or validation errors, it raises `RefUpdateError` with the server-provided status and message. + +You can provide the same metadata options as `create_commit`, including `expected_head_sha`, `base_branch`, `ephemeral`, `ephemeral_base`, and `committer`. + +> Files are chunked into 4 MiB segments, allowing streaming of large assets without buffering in memory. + +> The `target_branch` must already exist on the remote repository. To seed an empty repository, omit `expected_head_sha`; the service will create the first commit only when no refs are present. + +**Branching Example:** + +```python +# Create a new branch off of 'main' +result = await ( + repo.create_commit( + target_branch="feature/new-feature", + base_branch="main", # Branch off from main + commit_message="Start new feature", + author={"name": "Developer", "email": "dev@example.com"}, + ) + .add_file_from_string("feature.py", "# New feature implementation\n") + .send() +) +``` + +### Ephemeral Branches + +Ephemeral branches are temporary branches that are stored in a separate namespace. They're useful for preview environments, temporary workspaces, or short-lived feature branches that don't need to be permanent. + +**Creating an ephemeral branch:** + +```python +# Create an ephemeral branch off of 'main' +result = await ( + repo.create_commit( + target_branch="preview/pr-123", + base_branch="main", + ephemeral=True, # Mark the target branch as ephemeral + commit_message="Preview environment for PR 123", + author={"name": "CI Bot", "email": "ci@example.com"}, + ) + .add_file_from_string("index.html", "
<h1>Preview</h1>
") + .send() +) + +# Access files from the ephemeral branch +response = await repo.get_file_stream( + path="index.html", + ref="preview/pr-123", + ephemeral=True, # Read from ephemeral namespace +) +content = await response.aread() + +# List files in the ephemeral branch +files = await repo.list_files( + ref="preview/pr-123", + ephemeral=True, +) +print(files["paths"]) +``` + +**Branching from an ephemeral base:** + +```python +# Create an ephemeral branch off another ephemeral branch +result = await ( + repo.create_commit( + target_branch="preview/pr-123-variant", + base_branch="preview/pr-123", + ephemeral=True, + ephemeral_base=True, # Indicates the base branch is also ephemeral + commit_message="Variant of preview environment", + author={"name": "CI Bot", "email": "ci@example.com"}, + ) + .add_file_from_string("variant.txt", "This is a variant\n") + .send() +) +``` + +**Promoting an ephemeral branch:** + +```python +# Promote an ephemeral branch to a persistent branch (keeping the same name) +result = await repo.promote_ephemeral_branch(base_branch="preview/pr-123") + +# Or provide a new target name +result = await repo.promote_ephemeral_branch( + base_branch="preview/pr-123", + target_branch="feature/awesome-change", +) + +print(result["target_branch"]) # "feature/awesome-change" +``` + +**Key points about ephemeral branches:** + +- Ephemeral branches are stored separately from regular branches +- Use `ephemeral=True` when creating commits, reading files, or listing files +- Use `ephemeral_base=True` when branching off another ephemeral branch (requires `base_branch`) +- Promote an ephemeral branch with `repo.promote_ephemeral_branch()`; omit `target_branch` to keep the same name +- Ephemeral branches are ideal for temporary previews, CI/CD environments, or experiments + +### Streaming Large Files + +The commit builder accepts async iterables, allowing streaming of large files: + +```python +async def file_chunks(): + """Generate file chunks asynchronously.""" 
+ with open("/tmp/large-file.zip", "rb") as f: + while chunk := f.read(1024 * 1024): # Read 1MB at a time + yield chunk + +result = await ( + repo.create_commit( + target_branch="assets", + expected_head_sha="abc123...", + commit_message="Upload latest design bundle", + author={"name": "Assets Uploader", "email": "assets@example.com"}, + ) + .add_file("assets/design-kit.zip", file_chunks()) + .send() +) +``` + +### GitHub Repository Sync + +You can create a Pierre repository that syncs with a GitHub repository: + +```python +# Create a repository synced with GitHub +repo = await storage.create_repo( + id="my-synced-repo", + base_repo={ + "owner": "your-org", + "name": "your-repo", + "default_branch": "main", # optional, defaults to "main" + } +) + +# Pull latest changes from GitHub upstream +await repo.pull_upstream() + +# Now you can work with the synced content +files = await repo.list_files() +commits = await repo.list_commits() +``` + +**How it works:** + +1. When you create a repo with `base_repo`, Pierre links it to the specified GitHub repository +2. The `pull_upstream()` method fetches the latest changes from GitHub +3. You can then use all Pierre SDK features (diffs, commits, file access) on the synced content +4. The provider is automatically set to `"github"` when using `base_repo` + +### Forking Repositories + +You can fork an existing repository within the same Pierre org: + +```python +forked_repo = await storage.create_repo( + id="my-fork", + base_repo={ + "id": "my-template-id", + "ref": "main", # optional (branch/tag) + # "sha": "abc123..." # optional commit SHA (overrides ref) + }, +) +``` + +When `default_branch` is omitted, the SDK returns `"main"`. 
+ +### Restoring Commits + +You can restore a repository to a previous commit: + +```python +result = await repo.restore_commit( + target_branch="main", + target_commit_sha="abc123...", # Commit to restore to + expected_head_sha="def456...", # Optional: current HEAD for safety + commit_message="Restore to stable version", + author={"name": "DevOps", "email": "devops@example.com"}, +) + +print(result["commit_sha"]) +print(result["ref_update"]) +``` + +## API Reference + +### GitStorage + +```python +class GitStorage: + def __init__(self, options: GitStorageOptions) -> None: ... + async def create_repo( + self, + *, + id: Optional[str] = None, + default_branch: Optional[str] = None, # defaults to "main" + base_repo: Optional[BaseRepo] = None, + ttl: Optional[int] = None, + ) -> Repo: ... + async def find_one(self, *, id: str) -> Optional[Repo]: ... + def get_config(self) -> GitStorageOptions: ... +``` + +### Repo + +```python +class Repo: + @property + def id(self) -> str: ... + + async def get_remote_url( + self, + *, + permissions: Optional[List[str]] = None, + ttl: Optional[int] = None, + ) -> str: ... + + async def get_ephemeral_remote_url( + self, + *, + permissions: Optional[List[str]] = None, + ttl: Optional[int] = None, + ) -> str: ... + + async def get_file_stream( + self, + *, + path: str, + ref: Optional[str] = None, + ephemeral: Optional[bool] = None, + ttl: Optional[int] = None, + ) -> Response: ... + + async def list_files( + self, + *, + ref: Optional[str] = None, + ephemeral: Optional[bool] = None, + ttl: Optional[int] = None, + ) -> ListFilesResult: ... + + async def list_branches( + self, + *, + limit: Optional[int] = None, + cursor: Optional[str] = None, + ttl: Optional[int] = None, + ) -> ListBranchesResult: ... + + async def create_branch( + self, + *, + base_branch: str, + target_branch: str, + base_is_ephemeral: bool = False, + target_is_ephemeral: bool = False, + ttl: Optional[int] = None, + ) -> CreateBranchResult: ... 
+ + async def promote_ephemeral_branch( + self, + *, + base_branch: str, + target_branch: Optional[str] = None, + ttl: Optional[int] = None, + ) -> CreateBranchResult: ... + + async def list_commits( + self, + *, + branch: Optional[str] = None, + limit: Optional[int] = None, + cursor: Optional[str] = None, + ttl: Optional[int] = None, + ) -> ListCommitsResult: ... + + async def get_branch_diff( + self, + *, + branch: str, + base: Optional[str] = None, + ephemeral: Optional[bool] = None, + ephemeral_base: Optional[bool] = None, + ttl: Optional[int] = None, + ) -> GetBranchDiffResult: ... + + async def get_commit_diff( + self, + *, + sha: str, + ttl: Optional[int] = None, + ) -> GetCommitDiffResult: ... + + async def pull_upstream( + self, + *, + ttl: Optional[int] = None, + ) -> None: ... + + async def restore_commit( + self, + *, + target_branch: str, + target_commit_sha: str, + expected_head_sha: Optional[str] = None, + commit_message: str, + author: CommitSignature, + committer: Optional[CommitSignature] = None, + ttl: Optional[int] = None, + ) -> RestoreCommitResult: ... + + def create_commit( + self, + *, + target_branch: str, + expected_head_sha: Optional[str] = None, + base_branch: Optional[str] = None, + ephemeral: Optional[bool] = None, + ephemeral_base: Optional[bool] = None, + commit_message: str, + author: CommitSignature, + committer: Optional[CommitSignature] = None, + ttl: Optional[int] = None, + ) -> CommitBuilder: ... +``` + +### Type Definitions + +Key types are provided via TypedDict for better IDE support: + +```python +from pierre_storage.types import ( + GitStorageOptions, + BaseRepo, + GitHubBaseRepo, + ForkBaseRepo, + CommitSignature, + CreateCommitOptions, + ListFilesResult, + ListBranchesResult, + ListCommitsResult, + GetBranchDiffResult, + GetCommitDiffResult, + CreateBranchResult, + RestoreCommitResult, + RefUpdate, + # ... 
and more +) + +# BaseRepo type for GitHub sync or forks +class GitHubBaseRepo(TypedDict, total=False): + provider: Literal["github"] # Always "github" + owner: str # GitHub organization or user + name: str # Repository name + default_branch: Optional[str] # Default branch (optional) + +class ForkBaseRepo(TypedDict, total=False): + id: str # Source repo ID + ref: Optional[str] # Optional ref name + sha: Optional[str] # Optional commit SHA + +BaseRepo = Union[GitHubBaseRepo, ForkBaseRepo] +``` + +## Webhook Validation + +The SDK includes utilities for validating webhook signatures: + +```python +from pierre_storage import validate_webhook + +# Validate webhook signature +result = validate_webhook( + payload=request.body, # Raw payload bytes or string + headers={ + "X-Pierre-Signature": request.headers["X-Pierre-Signature"], + "X-Pierre-Event": request.headers["X-Pierre-Event"], + }, + secret="your-webhook-secret", + options={"max_age_seconds": 300}, # 5 minutes +) + +if result["valid"] and result.get("event_type") == "push": + event = result.get("payload") + if event: + print(f"Push to {event['ref']}") + print(f"Commit: {event['before']} -> {event['after']}") +else: + print(f"Invalid webhook: {result.get('error')}") +``` + +## Authentication + +The SDK uses JWT (JSON Web Tokens) for authentication. When you call `get_remote_url()`, it: + +1. Creates a JWT with your name, repository ID, and requested permissions +2. Signs it with your private key (ES256, RS256, or EdDSA) +3. Embeds it in the Git remote URL as the password + +The generated URLs are compatible with standard Git clients and include all necessary authentication. 
+ +### Manual JWT Generation + +For advanced use cases, you can generate JWTs manually using the `generate_jwt` helper: + +```python +from pierre_storage import generate_jwt + +# Read your private key +with open("path/to/key.pem", "r") as f: + private_key = f.read() + +# Generate a JWT token +token = generate_jwt( + key_pem=private_key, + issuer="your-name", # e.g., 'v0' + repo_id="your-repo-id", + scopes=["git:write", "git:read"], # Optional, defaults to git:write and git:read + ttl=3600, # Optional, defaults to 1 year (31536000 seconds) +) + +# Use the token in your Git URL or API calls +git_url = f"https://t:{token}@your-name.code.storage/your-repo-id.git" +``` + +**Parameters:** + +- `key_pem` (required): Private key in PEM format (PKCS8) +- `issuer` (required): Token issuer (your customer name) +- `repo_id` (required): Repository identifier +- `scopes` (optional): List of permission scopes. Defaults to `["git:write", "git:read"]` + - Available scopes: `"git:read"`, `"git:write"`, `"repo:write"` +- `ttl` (optional): Time-to-live in seconds. Defaults to 31536000 (1 year) + +The function automatically detects the key type (RSA, EC, or EdDSA) and uses the appropriate signing algorithm (RS256, ES256, or EdDSA). 
+ +## Error Handling + +The SDK provides specific error classes: + +```python +from pierre_storage import ApiError, RefUpdateError + +try: + repo = await storage.create_repo(id="existing") +except ApiError as e: + print(f"API error: {e.message}") + print(f"Status code: {e.status_code}") + +try: + result = await builder.send() +except RefUpdateError as e: + print(f"Ref update failed: {e.message}") + print(f"Status: {e.status}") + print(f"Reason: {e.reason}") + print(f"Ref update: {e.ref_update}") +``` + +## Development + +### Setup + +```bash +# Create virtual environment and install dependencies +python3 -m venv venv +source venv/bin/activate +pip install -e ".[dev]" + +# Or use Moon +moon run git-storage-sdk-python:setup + +# Run tests +pytest + +# Run tests with coverage +pytest --cov=pierre_storage --cov-report=html + +# Type checking +mypy pierre_storage + +# Linting +ruff check pierre_storage +``` + +### Building + +```bash +python -m build +``` + +## License + +MIT diff --git a/packages/git-storage-sdk-python/moon.yml b/packages/git-storage-sdk-python/moon.yml new file mode 100644 index 000000000..ead6de44f --- /dev/null +++ b/packages/git-storage-sdk-python/moon.yml @@ -0,0 +1,91 @@ +type: library +language: python +project: + name: git-storage-sdk-python + description: Pierre Git Storage SDK for Python + +tasks: + setup: + command: bash scripts/setup.sh + options: + cache: false + outputs: + - venv/ + + test: + command: ./venv/bin/pytest -v + deps: + - ~:setup + inputs: + - pierre_storage/**/*.py + - tests/**/*.py + - pyproject.toml + + test-coverage: + command: ./venv/bin/pytest --cov=pierre_storage --cov-report=term-missing --cov-report=html + deps: + - ~:setup + inputs: + - pierre_storage/**/*.py + - tests/**/*.py + + typecheck: + command: ./venv/bin/mypy pierre_storage + deps: + - ~:setup + inputs: + - pierre_storage/**/*.py + + lint: + command: ./venv/bin/ruff check pierre_storage + deps: + - ~:setup + inputs: + - pierre_storage/**/*.py + + format: + 
command: ./venv/bin/ruff format --check pierre_storage + deps: + - ~:setup + inputs: + - pierre_storage/**/*.py + + format-write: + command: ./venv/bin/ruff format pierre_storage + deps: + - ~:setup + inputs: + - pierre_storage/**/*.py + + build: + command: bash -c "PYTHONWARNINGS=ignore ./venv/bin/python -m build" + deps: + - ~:setup + inputs: + - pierre_storage/**/*.py + - pyproject.toml + outputs: + - dist/ + + check-package: + command: ./venv/bin/twine check dist/* + deps: + - ~:build + inputs: + - dist/* + + publish-test: + command: ./venv/bin/twine upload --repository testpypi dist/* + deps: + - ~:check-package + + publish: + command: ./venv/bin/twine upload dist/* + deps: + - ~:check-package + + clean: + command: rm -rf venv/ dist/ build/ *.egg-info htmlcov/ .coverage .pytest_cache/ .mypy_cache/ .ruff_cache/ + options: + cache: false + shell: true diff --git a/packages/git-storage-sdk-python/pierre_storage/__init__.py b/packages/git-storage-sdk-python/pierre_storage/__init__.py new file mode 100644 index 000000000..254c5e25e --- /dev/null +++ b/packages/git-storage-sdk-python/pierre_storage/__init__.py @@ -0,0 +1,90 @@ +"""Pierre Git Storage SDK for Python. + +A Python SDK for interacting with Pierre's git storage system. 
+""" + +from pierre_storage.auth import generate_jwt +from pierre_storage.client import GitStorage, create_client +from pierre_storage.errors import ApiError, RefUpdateError +from pierre_storage.types import ( + BaseRepo, + BranchInfo, + CommitInfo, + CommitResult, + CommitSignature, + CreateBranchResult, + DeleteRepoResult, + DiffFileState, + DiffStats, + FileDiff, + FilteredFile, + GetBranchDiffResult, + GetCommitDiffResult, + GitStorageOptions, + GrepFileMatch, + GrepLine, + GrepResult, + ListBranchesResult, + ListCommitsResult, + ListFilesResult, + ListReposResult, + NoteReadResult, + NoteWriteResult, + RefUpdate, + Repo, + RepoInfo, + RestoreCommitResult, +) +from pierre_storage.version import PACKAGE_VERSION +from pierre_storage.webhook import ( + WebhookPushEvent, + parse_signature_header, + validate_webhook, + validate_webhook_signature, +) + +__version__ = PACKAGE_VERSION + +__all__ = [ + # Main client + "GitStorage", + "create_client", + # Auth + "generate_jwt", + # Errors + "ApiError", + "RefUpdateError", + # Types + "BaseRepo", + "BranchInfo", + "CreateBranchResult", + "CommitInfo", + "CommitResult", + "CommitSignature", + "DeleteRepoResult", + "DiffFileState", + "DiffStats", + "FileDiff", + "FilteredFile", + "GetBranchDiffResult", + "GetCommitDiffResult", + "GrepFileMatch", + "GrepLine", + "GrepResult", + "GitStorageOptions", + "ListBranchesResult", + "ListCommitsResult", + "ListFilesResult", + "ListReposResult", + "NoteReadResult", + "NoteWriteResult", + "RefUpdate", + "RepoInfo", + "Repo", + "RestoreCommitResult", + # Webhook + "WebhookPushEvent", + "parse_signature_header", + "validate_webhook", + "validate_webhook_signature", +] diff --git a/packages/git-storage-sdk-python/pierre_storage/auth.py b/packages/git-storage-sdk-python/pierre_storage/auth.py new file mode 100644 index 000000000..4a06acbc7 --- /dev/null +++ b/packages/git-storage-sdk-python/pierre_storage/auth.py @@ -0,0 +1,77 @@ +"""JWT authentication utilities for Pierre Git Storage 
SDK.""" + +import time +from typing import List, Optional + +import jwt +from cryptography.hazmat.primitives import serialization + + +def generate_jwt( + key_pem: str, + issuer: str, + repo_id: str, + scopes: Optional[List[str]] = None, + ttl: int = 31536000, # 1 year default +) -> str: + """Generate a JWT token for Git storage authentication. + + Args: + key_pem: Private key in PEM format (PKCS8) + issuer: Token issuer (customer name) + repo_id: Repository identifier + scopes: List of permission scopes (defaults to ['git:write', 'git:read']) + ttl: Time-to-live in seconds (defaults to 1 year) + + Returns: + Signed JWT token string + + Raises: + ValueError: If key is invalid or cannot be loaded + """ + if not scopes: + scopes = ["git:write", "git:read"] + + now = int(time.time()) + payload = { + "iss": issuer, + "sub": "@pierre/storage", + "repo": repo_id, + "scopes": scopes, + "iat": now, + "exp": now + ttl, + } + + # Load the private key and determine algorithm + try: + private_key = serialization.load_pem_private_key( + key_pem.encode("utf-8"), + password=None, + ) + except Exception as e: + raise ValueError(f"Failed to load private key: {e}") from e + + # Determine algorithm based on key type + key_type = type(private_key).__name__ + if "RSA" in key_type: + algorithm = "RS256" + elif "EC" in key_type or "Elliptic" in key_type: + algorithm = "ES256" + elif "Ed25519" in key_type or "Ed448" in key_type: + algorithm = "EdDSA" + else: + # Try ES256 as default (most common for Pierre) + algorithm = "ES256" + + # Sign the JWT + try: + token = jwt.encode( + payload, + private_key, + algorithm=algorithm, + headers={"alg": algorithm, "typ": "JWT"}, + ) + except Exception as e: + raise ValueError(f"Failed to sign JWT: {e}") from e + + return token diff --git a/packages/git-storage-sdk-python/pierre_storage/client.py b/packages/git-storage-sdk-python/pierre_storage/client.py new file mode 100644 index 000000000..353f5b3b4 --- /dev/null +++ 
b/packages/git-storage-sdk-python/pierre_storage/client.py @@ -0,0 +1,449 @@ +"""Main client for Pierre Git Storage SDK.""" + +import uuid +from typing import Any, Dict, Optional, cast +from urllib.parse import urlencode + +import httpx + +from pierre_storage.auth import generate_jwt +from pierre_storage.errors import ApiError +from pierre_storage.repo import DEFAULT_TOKEN_TTL_SECONDS, RepoImpl +from pierre_storage.types import ( + BaseRepo, + DeleteRepoResult, + ForkBaseRepo, + GitHubBaseRepo, + GitStorageOptions, + ListReposResult, + Repo, + RepoInfo, +) +from pierre_storage.version import get_user_agent + +DEFAULT_API_BASE_URL = "https://api.{{org}}.code.storage" +DEFAULT_STORAGE_BASE_URL = "{{org}}.code.storage" +DEFAULT_API_VERSION = 1 + + +class GitStorage: + """Pierre Git Storage client.""" + + def __init__(self, options: GitStorageOptions) -> None: + """Initialize GitStorage client. + + Args: + options: Client configuration options + + Raises: + ValueError: If required options are missing or invalid + """ + # Validate required fields + if not options or "name" not in options or "key" not in options: + raise ValueError( + "GitStorage requires a name and key. Please check your configuration and try again." + ) + + name = options["name"] + key = options["key"] + + if name is None or key is None: + raise ValueError( + "GitStorage requires a name and key. Please check your configuration and try again." 
+ ) + + if not isinstance(name, str) or not name.strip(): + raise ValueError("GitStorage name must be a non-empty string.") + + if not isinstance(key, str) or not key.strip(): + raise ValueError("GitStorage key must be a non-empty string.") + + # Resolve configuration + api_base_url = options.get("api_base_url") or self.get_default_api_base_url(name) + storage_base_url = options.get("storage_base_url") or self.get_default_storage_base_url( + name + ) + api_version = options.get("api_version") or DEFAULT_API_VERSION + default_ttl = options.get("default_ttl") + + self.options: GitStorageOptions = { + "name": name, + "key": key, + "api_base_url": api_base_url, + "storage_base_url": storage_base_url, + "api_version": api_version, + } + + if default_ttl: + self.options["default_ttl"] = default_ttl + + @staticmethod + def get_default_api_base_url(name: str) -> str: + """Get default API base URL with org name inserted. + + Args: + name: Organization name + + Returns: + API base URL with org name inserted + """ + return DEFAULT_API_BASE_URL.replace("{{org}}", name) + + @staticmethod + def get_default_storage_base_url(name: str) -> str: + """Get default storage base URL with org name inserted. + + Args: + name: Organization name + + Returns: + Storage base URL with org name inserted + """ + return DEFAULT_STORAGE_BASE_URL.replace("{{org}}", name) + + async def create_repo( + self, + *, + id: Optional[str] = None, + default_branch: Optional[str] = None, + base_repo: Optional[BaseRepo] = None, + ttl: Optional[int] = None, + ) -> Repo: + """Create a new repository. 
+ + Args: + id: Repository ID (auto-generated if not provided) + default_branch: Default branch name (default: "main" for non-forks) + base_repo: Optional base repository for GitHub sync or fork + GitHub: owner, name, default_branch + Fork: id, ref, sha + ttl: Token TTL in seconds + + Returns: + Created repository instance + + Raises: + ApiError: If repository creation fails + """ + repo_id = id or str(uuid.uuid4()) + ttl = ttl or DEFAULT_TOKEN_TTL_SECONDS + jwt = self._generate_jwt( + repo_id, + {"permissions": ["repo:write"], "ttl": ttl}, + ) + + url = f"{self.options['api_base_url']}/api/v{self.options['api_version']}/repos" + body: Dict[str, Any] = {} + + # Match backend priority: base_repo.default_branch > default_branch > 'main' + explicit_default_branch = default_branch is not None + resolved_default_branch: Optional[str] = None + + if base_repo: + if "id" in base_repo: + fork_repo = cast(ForkBaseRepo, base_repo) + base_repo_token = self._generate_jwt( + fork_repo["id"], + {"permissions": ["git:read"], "ttl": ttl}, + ) + base_repo_payload: Dict[str, Any] = { + "provider": "code", + "owner": self.options["name"], + "name": fork_repo["id"], + "operation": "fork", + "auth": {"token": base_repo_token}, + } + if fork_repo.get("ref"): + base_repo_payload["ref"] = fork_repo["ref"] + if fork_repo.get("sha"): + base_repo_payload["sha"] = fork_repo["sha"] + body["base_repo"] = base_repo_payload + if explicit_default_branch: + resolved_default_branch = default_branch + body["default_branch"] = default_branch + else: + github_repo = cast(GitHubBaseRepo, base_repo) + # Ensure provider is set to 'github' if not provided + base_repo_with_provider = { + "provider": "github", + **github_repo, + } + body["base_repo"] = base_repo_with_provider + if github_repo.get("default_branch"): + resolved_default_branch = github_repo["default_branch"] + elif explicit_default_branch: + resolved_default_branch = default_branch + else: + resolved_default_branch = "main" + 
body["default_branch"] = resolved_default_branch + else: + resolved_default_branch = default_branch if explicit_default_branch else "main" + body["default_branch"] = resolved_default_branch + + async with httpx.AsyncClient() as client: + response = await client.post( + url, + headers={ + "Authorization": f"Bearer {jwt}", + "Content-Type": "application/json", + "Code-Storage-Agent": get_user_agent(), + }, + json=body, + timeout=30.0, + ) + + if response.status_code == 409: + raise ApiError("Repository already exists", status_code=409) + + if not response.is_success: + raise ApiError( + f"Failed to create repository: {response.status_code} {response.reason_phrase}", + status_code=response.status_code, + response=response, + ) + + # These are guaranteed to be set in __init__ + api_base_url: str = self.options["api_base_url"] # type: ignore[assignment] + storage_base_url: str = self.options["storage_base_url"] # type: ignore[assignment] + name: str = self.options["name"] + api_version: int = self.options["api_version"] # type: ignore[assignment] + + return RepoImpl( + repo_id, + resolved_default_branch or "main", + api_base_url, + storage_base_url, + name, + api_version, + self._generate_jwt, + ) + + async def list_repos( + self, + *, + cursor: Optional[str] = None, + limit: Optional[int] = None, + ttl: Optional[int] = None, + ) -> ListReposResult: + """List repositories for the organization.""" + ttl = ttl or DEFAULT_TOKEN_TTL_SECONDS + jwt = self._generate_jwt( + "org", + {"permissions": ["org:read"], "ttl": ttl}, + ) + + params: Dict[str, str] = {} + if cursor: + params["cursor"] = cursor + if limit is not None: + params["limit"] = str(limit) + + url = f"{self.options['api_base_url']}/api/v{self.options['api_version']}/repos" + if params: + url += f"?{urlencode(params)}" + + async with httpx.AsyncClient() as client: + response = await client.get( + url, + headers={ + "Authorization": f"Bearer {jwt}", + "Code-Storage-Agent": get_user_agent(), + }, + timeout=30.0, + ) 
+ + if not response.is_success: + raise ApiError( + f"Failed to list repositories: {response.status_code} {response.reason_phrase}", + status_code=response.status_code, + response=response, + ) + + data = response.json() + repos: list[RepoInfo] = [] + for repo in data.get("repos", []): + entry: RepoInfo = { + "repo_id": repo.get("repo_id", ""), + "url": repo.get("url", ""), + "default_branch": repo.get("default_branch", "main"), + "created_at": repo.get("created_at", ""), + } + if repo.get("base_repo"): + entry["base_repo"] = repo.get("base_repo") + repos.append(entry) + + return { + "repos": repos, + "next_cursor": data.get("next_cursor"), + "has_more": data.get("has_more", False), + } + + async def find_one(self, *, id: str) -> Optional[Repo]: + """Find a repository by ID. + + Args: + id: Repository ID to find + + Returns: + Repository instance if found, None otherwise + """ + repo_id = id + jwt = self._generate_jwt( + repo_id, + {"permissions": ["git:read"], "ttl": DEFAULT_TOKEN_TTL_SECONDS}, + ) + + url = f"{self.options['api_base_url']}/api/v{self.options['api_version']}/repo" + + async with httpx.AsyncClient() as client: + response = await client.get( + url, + headers={ + "Authorization": f"Bearer {jwt}", + "Code-Storage-Agent": get_user_agent(), + }, + timeout=30.0, + ) + + if response.status_code == 404: + return None + + if not response.is_success: + raise ApiError( + f"Failed to find repository: {response.status_code} {response.reason_phrase}", + status_code=response.status_code, + response=response, + ) + + body = response.json() + default_branch = body.get("default_branch", "main") + + # These are guaranteed to be set in __init__ + api_base_url: str = self.options["api_base_url"] # type: ignore[assignment] + storage_base_url: str = self.options["storage_base_url"] # type: ignore[assignment] + name: str = self.options["name"] + api_version: int = self.options["api_version"] # type: ignore[assignment] + + return RepoImpl( + repo_id, + default_branch, + 
api_base_url, + storage_base_url, + name, + api_version, + self._generate_jwt, + ) + + async def delete_repo( + self, + *, + id: str, + ttl: Optional[int] = None, + ) -> DeleteRepoResult: + """Delete a repository by ID. + + Args: + id: Repository ID to delete + ttl: Token TTL in seconds + + Returns: + Deletion result with repo_id and message + + Raises: + ApiError: If repository not found or already deleted + """ + repo_id = id + ttl = ttl or DEFAULT_TOKEN_TTL_SECONDS + jwt = self._generate_jwt( + repo_id, + {"permissions": ["repo:write"], "ttl": ttl}, + ) + + url = f"{self.options['api_base_url']}/api/v{self.options['api_version']}/repos/delete" + + async with httpx.AsyncClient() as client: + response = await client.delete( + url, + headers={ + "Authorization": f"Bearer {jwt}", + "Content-Type": "application/json", + "Code-Storage-Agent": get_user_agent(), + }, + timeout=30.0, + ) + + if response.status_code == 404: + raise ApiError("Repository not found", status_code=404) + + if response.status_code == 409: + raise ApiError("Repository already deleted", status_code=409) + + if not response.is_success: + raise ApiError( + f"Failed to delete repository: {response.status_code} {response.reason_phrase}", + status_code=response.status_code, + response=response, + ) + + body = response.json() + return DeleteRepoResult( + repo_id=body["repo_id"], + message=body["message"], + ) + + def get_config(self) -> GitStorageOptions: + """Get current client configuration. + + Returns: + Copy of current configuration + """ + return {**self.options} + + def _generate_jwt( + self, + repo_id: str, + options: Optional[Dict[str, Any]] = None, + ) -> str: + """Generate JWT token for authentication. 
+ + Args: + repo_id: Repository identifier + options: JWT generation options (internal use) + + Returns: + Signed JWT token + """ + permissions = ["git:write", "git:read"] + ttl: int = 31536000 # 1 year default + + if options: + if "permissions" in options: + permissions = options["permissions"] + if "ttl" in options: + option_ttl = options["ttl"] + if isinstance(option_ttl, int): + ttl = option_ttl + elif "default_ttl" in self.options: + default_ttl = self.options["default_ttl"] + if isinstance(default_ttl, int): + ttl = default_ttl + + return generate_jwt( + self.options["key"], + self.options["name"], + repo_id, + permissions, + ttl, + ) + + +def create_client(options: GitStorageOptions) -> GitStorage: + """Create a GitStorage client. + + Args: + options: Client configuration options + + Returns: + GitStorage client instance + """ + return GitStorage(options) diff --git a/packages/git-storage-sdk-python/pierre_storage/commit.py b/packages/git-storage-sdk-python/pierre_storage/commit.py new file mode 100644 index 000000000..fc2548857 --- /dev/null +++ b/packages/git-storage-sdk-python/pierre_storage/commit.py @@ -0,0 +1,524 @@ +"""Commit builder for Pierre Git Storage SDK.""" + +import base64 +import json +import uuid +from typing import Any, AsyncIterator, Callable, Dict, List, Optional + +import httpx + +from pierre_storage.errors import RefUpdateError, infer_ref_update_reason +from pierre_storage.types import ( + CommitResult, + CreateCommitOptions, + FileSource, + GitFileMode, + RefUpdate, +) +from pierre_storage.version import get_user_agent + +MAX_CHUNK_BYTES = 4 * 1024 * 1024 # 4 MiB +DEFAULT_TTL_SECONDS = 3600 # 1 hour + + +def _normalize_commit_options(options: CreateCommitOptions) -> Dict[str, Any]: + """Validate and normalize commit options.""" + normalized: Dict[str, Any] = {} + + if options is None: + raise ValueError("createCommit options are required") + + target_branch = (options.get("target_branch") or "").strip() + if not target_branch: + raise 
ValueError("createCommit target_branch is required") + if target_branch.startswith("refs/"): + raise ValueError("createCommit target_branch must not include refs/ prefix") + normalized["target_branch"] = target_branch + + commit_message = (options.get("commit_message") or "").strip() + if not commit_message: + raise ValueError("createCommit commit_message is required") + normalized["commit_message"] = commit_message + + author = options.get("author") + if not author: + raise ValueError("createCommit author is required") + author_name = (author.get("name") or "").strip() + author_email = (author.get("email") or "").strip() + if not author_name or not author_email: + raise ValueError("createCommit author name and email are required") + normalized["author"] = {"name": author_name, "email": author_email} + + expected_head_sha = options.get("expected_head_sha") + if expected_head_sha: + normalized["expected_head_sha"] = expected_head_sha.strip() + + base_branch = options.get("base_branch") + if base_branch: + trimmed_base = base_branch.strip() + if not trimmed_base: + pass + elif trimmed_base.startswith("refs/"): + raise ValueError("createCommit base_branch must not include refs/ prefix") + else: + normalized["base_branch"] = trimmed_base + + if "ephemeral" in options: + normalized["ephemeral"] = bool(options["ephemeral"]) + + if "ephemeral_base" in options: + normalized["ephemeral_base"] = bool(options["ephemeral_base"]) + + if normalized.get("ephemeral_base") and "base_branch" not in normalized: + raise ValueError("createCommit ephemeral_base requires base_branch") + + if "committer" in options and options["committer"]: + normalized["committer"] = options["committer"] + + if "ttl" in options and options["ttl"]: + normalized["ttl"] = options["ttl"] + + return normalized + + +def _base_metadata_from_options(options: Dict[str, Any]) -> Dict[str, Any]: + """Build common metadata fields shared by commit requests.""" + metadata: Dict[str, Any] = { + "target_branch": 
options["target_branch"], + "commit_message": options["commit_message"], + "author": options["author"], + } + + if options.get("expected_head_sha"): + metadata["expected_head_sha"] = options["expected_head_sha"] + + if options.get("base_branch"): + metadata["base_branch"] = options["base_branch"] + + if options.get("ephemeral"): + metadata["ephemeral"] = True + + if options.get("ephemeral_base"): + metadata["ephemeral_base"] = True + + if options.get("committer"): + metadata["committer"] = options["committer"] + + return metadata + + +def _to_ref_update(result: Dict[str, Any]) -> RefUpdate: + """Convert result payload to ref update info.""" + return { + "branch": result.get("branch", ""), + "old_sha": result.get("old_sha", ""), + "new_sha": result.get("new_sha", ""), + } + + +def _build_commit_result(ack: Dict[str, Any]) -> CommitResult: + """Convert commit-pack style acknowledgment into CommitResult.""" + result = ack.get("result", {}) + ref_update = _to_ref_update(result) + + if not result.get("success"): + raise RefUpdateError( + result.get("message", f"Commit failed with status {result.get('status')}"), + status=result.get("status"), + ref_update=ref_update, + ) + + commit = ack.get("commit", {}) + return { + "commit_sha": commit["commit_sha"], + "tree_sha": commit["tree_sha"], + "target_branch": commit["target_branch"], + "pack_bytes": commit["pack_bytes"], + "blob_count": commit["blob_count"], + "ref_update": ref_update, + } + + +async def _parse_commit_error(response: httpx.Response, operation: str) -> Dict[str, Any]: + """Parse an error response from commit endpoints.""" + default_status = infer_ref_update_reason(str(response.status_code)) + status = default_status + reason_phrase = response.reason_phrase or "" + message = f"{operation} request failed ({response.status_code} {reason_phrase})".strip() + ref_update = None + + try: + data = await response.aread() + json_data = json.loads(data) + + if "result" in json_data: + result = json_data["result"] + if 
result.get("status"): + status = result["status"] + if result.get("message"): + message = result["message"] + ref_update = { + "branch": result.get("branch"), + "old_sha": result.get("old_sha"), + "new_sha": result.get("new_sha"), + } + ref_update = {k: v for k, v in (ref_update or {}).items() if v} + elif "error" in json_data: + message = json_data["error"] + except Exception: + # Preserve default message if parsing fails + pass + + return { + "status": status, + "message": message, + "ref_update": ref_update, + } + + +async def _to_async_iterator(source: FileSource) -> AsyncIterator[bytes]: + """Convert FileSource inputs into an async iterator of bytes.""" + if isinstance(source, str): + yield source.encode("utf-8") + elif isinstance(source, (bytes, bytearray, memoryview)): + yield bytes(source) + elif hasattr(source, "__aiter__"): + async for chunk in source: + if isinstance(chunk, str): + yield chunk.encode("utf-8") + else: + yield bytes(chunk) + elif hasattr(source, "__iter__"): + for chunk in source: + if isinstance(chunk, str): + yield chunk.encode("utf-8") + else: + yield bytes(chunk) + else: + raise TypeError(f"Unsupported file source type: {type(source)}") + + +async def chunk_file_source( + source: FileSource, + chunk_size: int = MAX_CHUNK_BYTES, +) -> AsyncIterator[Dict[str, Any]]: + """Yield chunk dictionaries for streaming requests.""" + pending: Optional[bytes] = None + produced = False + + async for data in _to_async_iterator(source): + if pending and len(pending) == chunk_size: + yield {"chunk": pending, "eof": False} + produced = True + pending = None + + merged = pending + data if pending else data + + while len(merged) > chunk_size: + chunk = merged[:chunk_size] + merged = merged[chunk_size:] + yield {"chunk": chunk, "eof": False} + produced = True + + pending = merged + + if pending is not None: + yield {"chunk": pending, "eof": True} + produced = True + + if not produced: + yield {"chunk": b"", "eof": True} + + +class FileOperation: + 
"""Represents a file operation in a commit.""" + + def __init__( + self, + path: str, + content_id: str, + operation: str, + mode: Optional[str] = None, + source: Optional[FileSource] = None, + ) -> None: + """Initialize a file operation. + + Args: + path: File path + content_id: Unique content identifier + operation: Operation type ('upsert' or 'delete') + mode: Git file mode + source: File content source + """ + self.path = path + self.content_id = content_id + self.operation = operation + self.mode = mode + self.source = source + + +class CommitBuilderImpl: + """Implementation of commit builder for creating commits.""" + + def __init__( + self, + options: CreateCommitOptions, + get_auth_token: Callable[[], str], + base_url: str, + api_version: int, + ) -> None: + """Initialize the commit builder. + + Args: + options: Commit options + get_auth_token: Function to get auth token + base_url: API base URL + api_version: API version + + Raises: + ValueError: If required options are missing or invalid + """ + self.get_auth_token = get_auth_token + self.url = f"{base_url.rstrip('/')}/api/v{api_version}/repos/commit-pack" + self.operations: List[FileOperation] = [] + self.sent = False + + self.options = _normalize_commit_options(options) + + def add_file( + self, + path: str, + source: FileSource, + *, + mode: Optional[GitFileMode] = None, + ) -> "CommitBuilderImpl": + """Add a file to the commit. 
+ + Args: + path: File path + source: File content source + mode: Git file mode (default: regular file) + + Returns: + Self for chaining + + Raises: + RuntimeError: If builder has already been sent + """ + self._ensure_not_sent() + normalized_path = self._normalize_path(path) + content_id = str(uuid.uuid4()) + file_mode = mode or GitFileMode.REGULAR + + self.operations.append( + FileOperation( + path=normalized_path, + content_id=content_id, + operation="upsert", + mode=file_mode, + source=source, + ) + ) + return self + + def add_file_from_string( + self, + path: str, + contents: str, + *, + encoding: str = "utf-8", + mode: Optional[GitFileMode] = None, + ) -> "CommitBuilderImpl": + """Add a file from a string. + + Args: + path: File path + contents: File contents as string + encoding: Text encoding (default: utf-8) + mode: Git file mode (default: regular file) + + Returns: + Self for chaining + """ + data = contents.encode(encoding) + return self.add_file(path, data, mode=mode) + + def delete_path(self, path: str) -> "CommitBuilderImpl": + """Delete a path from the commit. + + Args: + path: Path to delete + + Returns: + Self for chaining + + Raises: + RuntimeError: If builder has already been sent + """ + self._ensure_not_sent() + normalized_path = self._normalize_path(path) + self.operations.append( + FileOperation( + path=normalized_path, + content_id=str(uuid.uuid4()), + operation="delete", + ) + ) + return self + + async def send(self) -> CommitResult: + """Send the commit to the server. 
+ + Returns: + Commit result with SHA and ref update info + + Raises: + RuntimeError: If builder has already been sent + RefUpdateError: If commit fails + """ + self._ensure_not_sent() + self.sent = True + + metadata = self._build_metadata() + auth_token = self.get_auth_token() + + headers = { + "Authorization": f"Bearer {auth_token}", + "Content-Type": "application/x-ndjson", + "Accept": "application/json", + "Code-Storage-Agent": get_user_agent(), + } + + async with ( + httpx.AsyncClient() as client, + client.stream( + "POST", + self.url, + headers=headers, + content=self._build_request_body(metadata), + timeout=180.0, + ) as response, + ): + if not response.is_success: + error_info = await _parse_commit_error(response, "createCommit") + raise RefUpdateError( + error_info["message"], + status=error_info["status"], + reason=error_info["status"], + ref_update=error_info.get("ref_update"), + ) + + result_data = await response.aread() + result = json.loads(result_data) + return _build_commit_result(result) + + def _build_metadata(self) -> Dict[str, Any]: + """Build metadata payload for commit.""" + files = [] + for op in self.operations: + file_entry: Dict[str, Any] = { + "path": op.path, + "content_id": op.content_id, + "operation": op.operation, + } + if op.mode: + file_entry["mode"] = op.mode + files.append(file_entry) + + metadata = _base_metadata_from_options(self.options) + metadata["files"] = files + return metadata + + async def _build_request_body(self, metadata: Dict[str, Any]) -> AsyncIterator[bytes]: + """Build NDJSON request body with metadata and blob chunks.""" + # First line: metadata + yield json.dumps({"metadata": metadata}).encode("utf-8") + b"\n" + + # Subsequent lines: blob chunks + for op in self.operations: + if op.operation == "upsert" and op.source is not None: + async for chunk in chunk_file_source(op.source): + blob_chunk = { + "blob_chunk": { + "content_id": op.content_id, + "data": base64.b64encode(chunk["chunk"]).decode("ascii"), + 
"eof": chunk["eof"], + } + } + yield json.dumps(blob_chunk).encode("utf-8") + b"\n" + + def _ensure_not_sent(self) -> None: + """Ensure the builder hasn't been sent yet.""" + if self.sent: + raise RuntimeError("createCommit builder cannot be reused after send()") + + def _normalize_path(self, path: str) -> str: + """Normalize a file path.""" + if not path or not isinstance(path, str) or not path.strip(): + raise ValueError("File path must be a non-empty string") + return path.lstrip("/") + + +async def send_diff_commit_request( + options: CreateCommitOptions, + diff_source: FileSource, + get_auth_token: Callable[[], str], + base_url: str, + api_version: int, +) -> CommitResult: + """Send a diff-based commit request.""" + normalized_options = _normalize_commit_options(options) + metadata = _base_metadata_from_options(normalized_options) + + auth_token = get_auth_token() + headers = { + "Authorization": f"Bearer {auth_token}", + "Content-Type": "application/x-ndjson", + "Accept": "application/json", + "Code-Storage-Agent": get_user_agent(), + } + + async def request_stream() -> AsyncIterator[bytes]: + yield json.dumps({"metadata": metadata}).encode("utf-8") + b"\n" + async for chunk in chunk_file_source(diff_source): + payload = { + "diff_chunk": { + "data": base64.b64encode(chunk["chunk"]).decode("ascii"), + "eof": chunk["eof"], + } + } + yield json.dumps(payload).encode("utf-8") + b"\n" + + url = f"{base_url.rstrip('/')}/api/v{api_version}/repos/diff-commit" + + async with ( + httpx.AsyncClient() as client, + client.stream( + "POST", + url, + headers=headers, + content=request_stream(), + timeout=180.0, + ) as response, + ): + if not response.is_success: + error_info = await _parse_commit_error(response, "createCommitFromDiff") + raise RefUpdateError( + error_info["message"], + status=error_info["status"], + reason=error_info["status"], + ref_update=error_info.get("ref_update"), + ) + + result_data = await response.aread() + result = json.loads(result_data) + 
return _build_commit_result(result) + + +def resolve_commit_ttl_seconds(options: Optional[CreateCommitOptions]) -> int: + """Resolve TTL for commit operations.""" + if options and "ttl" in options: + ttl = options["ttl"] + if ttl and ttl > 0: + return ttl + return DEFAULT_TTL_SECONDS diff --git a/packages/git-storage-sdk-python/pierre_storage/errors.py b/packages/git-storage-sdk-python/pierre_storage/errors.py new file mode 100644 index 000000000..bdd78a8b5 --- /dev/null +++ b/packages/git-storage-sdk-python/pierre_storage/errors.py @@ -0,0 +1,81 @@ +"""Error classes for Pierre Git Storage SDK.""" + +from typing import TYPE_CHECKING, Any, Dict, Optional + +if TYPE_CHECKING: + from pierre_storage.types import RefUpdate + + +class ApiError(Exception): + """Exception raised for API errors.""" + + def __init__( + self, + message: str, + status_code: Optional[int] = None, + response: Optional[Any] = None, + ) -> None: + """Initialize the ApiError. + + Args: + message: Error message + status_code: HTTP status code + response: Raw response object + """ + super().__init__(message) + self.message = message + self.status_code = status_code + self.response = response + + +class RefUpdateError(Exception): + """Exception raised when a ref update fails.""" + + def __init__( + self, + message: str, + status: Optional[str] = None, + reason: Optional[str] = None, + ref_update: "Optional[RefUpdate]" = None, + ) -> None: + """Initialize the RefUpdateError. + + Args: + message: Error message + status: Status code from the server + reason: Reason for the failure + ref_update: Partial ref update information + """ + super().__init__(message) + self.message = message + self.status = status or "unknown" + self.reason = reason or self.status + self.ref_update: Dict[str, str] = ref_update or {} # type: ignore[assignment] + + +def infer_ref_update_reason(status_code: str) -> str: + """Infer the ref update reason from HTTP status code. 
+ + Args: + status_code: HTTP status code as string + + Returns: + Inferred reason string + """ + status_map = { + "400": "invalid", + "401": "unauthorized", + "403": "forbidden", + "404": "not_found", + "408": "timeout", + "409": "conflict", + "412": "precondition_failed", + "422": "invalid", + "429": "unavailable", + "499": "timeout", + "500": "internal", + "502": "unavailable", + "503": "unavailable", + "504": "timeout", + } + return status_map.get(status_code, "unknown") diff --git a/packages/git-storage-sdk-python/pierre_storage/py.typed b/packages/git-storage-sdk-python/pierre_storage/py.typed new file mode 100644 index 000000000..7632ecf77 --- /dev/null +++ b/packages/git-storage-sdk-python/pierre_storage/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561 diff --git a/packages/git-storage-sdk-python/pierre_storage/repo.py b/packages/git-storage-sdk-python/pierre_storage/repo.py new file mode 100644 index 000000000..ce545ef42 --- /dev/null +++ b/packages/git-storage-sdk-python/pierre_storage/repo.py @@ -0,0 +1,1290 @@ +"""Repository implementation for Pierre Git Storage SDK.""" + +from datetime import datetime +from types import TracebackType +from typing import Any, Callable, Dict, List, Optional +from urllib.parse import urlencode + +import httpx + +from pierre_storage.commit import ( + CommitBuilderImpl, + resolve_commit_ttl_seconds, + send_diff_commit_request, +) +from pierre_storage.errors import ApiError, RefUpdateError, infer_ref_update_reason +from pierre_storage.types import ( + BranchInfo, + CommitBuilder, + CommitInfo, + CommitResult, + CommitSignature, + CreateBranchResult, + CreateCommitOptions, + DiffFileState, + FileDiff, + FileSource, + FilteredFile, + GetBranchDiffResult, + GetCommitDiffResult, + GrepFileMatch, + GrepLine, + GrepResult, + ListBranchesResult, + ListCommitsResult, + ListFilesResult, + NoteReadResult, + NoteWriteResult, + RefUpdate, + RestoreCommitResult, +) +from pierre_storage.version import get_user_agent + 
+DEFAULT_TOKEN_TTL_SECONDS = 3600 # 1 hour + + +class StreamingResponse: + """Stream wrapper that keeps the HTTP client alive until closed.""" + + def __init__(self, response: httpx.Response, client: httpx.AsyncClient) -> None: + self._response = response + self._client = client + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + async def aclose(self) -> None: + await self._response.aclose() + await self._client.aclose() + + async def __aenter__(self) -> "StreamingResponse": + return self + + async def __aexit__( + self, + exc_type: Optional[type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + await self.aclose() + + +def resolve_invocation_ttl_seconds( + options: Optional[Dict[str, Any]] = None, + default_value: int = DEFAULT_TOKEN_TTL_SECONDS, +) -> int: + """Resolve TTL for API invocations.""" + if options and "ttl" in options: + ttl = options["ttl"] + if isinstance(ttl, int) and ttl > 0: + return int(ttl) + return default_value + + +def normalize_diff_state(raw_state: str) -> DiffFileState: + """Normalize diff state from raw format.""" + if not raw_state: + return DiffFileState.UNKNOWN + + leading = raw_state.strip()[0].upper() if raw_state.strip() else "" + state_map = { + "A": DiffFileState.ADDED, + "M": DiffFileState.MODIFIED, + "D": DiffFileState.DELETED, + "R": DiffFileState.RENAMED, + "C": DiffFileState.COPIED, + "T": DiffFileState.TYPE_CHANGED, + "U": DiffFileState.UNMERGED, + } + return state_map.get(leading, DiffFileState.UNKNOWN) + + +class RepoImpl: + """Implementation of repository operations.""" + + def __init__( + self, + repo_id: str, + default_branch: str, + api_base_url: str, + storage_base_url: str, + name: str, + api_version: int, + generate_jwt: Callable[[str, Optional[Dict[str, Any]]], str], + ) -> None: + """Initialize repository. 
+ + Args: + repo_id: Repository identifier + default_branch: Default branch name + api_base_url: API base URL + storage_base_url: Storage base URL + name: Customer name + api_version: API version + generate_jwt: Function to generate JWT tokens + """ + self._id = repo_id + self._default_branch = default_branch + self.api_base_url = api_base_url.rstrip("/") + self.storage_base_url = storage_base_url + self.name = name + self.api_version = api_version + self.generate_jwt = generate_jwt + + @property + def id(self) -> str: + """Get repository ID.""" + return self._id + + @property + def default_branch(self) -> str: + """Get default branch name.""" + return self._default_branch + + async def get_remote_url( + self, + *, + permissions: Optional[list[str]] = None, + ttl: Optional[int] = None, + ) -> str: + """Get remote URL for Git operations. + + Args: + permissions: List of permissions (e.g., ["git:write", "git:read"]) + ttl: Token TTL in seconds + + Returns: + Git remote URL with embedded JWT + """ + options: Dict[str, Any] = {} + if permissions is not None: + options["permissions"] = permissions + if ttl is not None: + options["ttl"] = ttl + + jwt_token = self.generate_jwt(self._id, options if options else None) + url = f"https://t:{jwt_token}@{self.storage_base_url}/{self._id}.git" + return url + + async def get_ephemeral_remote_url( + self, + *, + permissions: Optional[list[str]] = None, + ttl: Optional[int] = None, + ) -> str: + """Get ephemeral remote URL for Git operations. 
+ + Args: + permissions: List of permissions (e.g., ["git:write", "git:read"]) + ttl: Token TTL in seconds + + Returns: + Git remote URL with embedded JWT pointing to ephemeral namespace + """ + url = await self.get_remote_url(permissions=permissions, ttl=ttl) + return url.replace(".git", "+ephemeral.git") + + async def get_file_stream( + self, + *, + path: str, + ref: Optional[str] = None, + ephemeral: Optional[bool] = None, + ttl: Optional[int] = None, + ) -> StreamingResponse: + """Get file content as streaming response. + + Args: + path: File path to retrieve + ref: Git ref (branch, tag, or commit SHA) + ephemeral: Whether to read from the ephemeral namespace + ttl: Token TTL in seconds + + Returns: + HTTP response with file content stream + """ + ttl = ttl or DEFAULT_TOKEN_TTL_SECONDS + jwt = self.generate_jwt(self._id, {"permissions": ["git:read"], "ttl": ttl}) + + params = {"path": path} + if ref: + params["ref"] = ref + if ephemeral is not None: + params["ephemeral"] = "true" if ephemeral else "false" + + url = f"{self.api_base_url}/api/v{self.api_version}/repos/file" + if params: + url += f"?{urlencode(params)}" + + client = httpx.AsyncClient() + try: + stream_context = client.stream( + "GET", + url, + headers={ + "Authorization": f"Bearer {jwt}", + "Code-Storage-Agent": get_user_agent(), + }, + timeout=30.0, + ) + response = await stream_context.__aenter__() + response.raise_for_status() + except Exception: + await client.aclose() + raise + + return StreamingResponse(response, client) + + async def list_files( + self, + *, + ref: Optional[str] = None, + ephemeral: Optional[bool] = None, + ttl: Optional[int] = None, + ) -> ListFilesResult: + """List files in repository. 
+ + Args: + ref: Git ref (branch, tag, or commit SHA) + ephemeral: Whether to read from the ephemeral namespace + ttl: Token TTL in seconds + + Returns: + List of file paths and ref + """ + ttl = ttl or DEFAULT_TOKEN_TTL_SECONDS + jwt = self.generate_jwt(self._id, {"permissions": ["git:read"], "ttl": ttl}) + + params = {} + if ref: + params["ref"] = ref + if ephemeral is not None: + params["ephemeral"] = "true" if ephemeral else "false" + + url = f"{self.api_base_url}/api/v{self.api_version}/repos/files" + if params: + url += f"?{urlencode(params)}" + + async with httpx.AsyncClient() as client: + response = await client.get( + url, + headers={ + "Authorization": f"Bearer {jwt}", + "Code-Storage-Agent": get_user_agent(), + }, + timeout=30.0, + ) + response.raise_for_status() + data = response.json() + return {"paths": data["paths"], "ref": data["ref"]} + + async def list_branches( + self, + *, + cursor: Optional[str] = None, + limit: Optional[int] = None, + ttl: Optional[int] = None, + ) -> ListBranchesResult: + """List branches in repository. 
+ + Args: + cursor: Pagination cursor + limit: Maximum number of branches to return + ttl: Token TTL in seconds + + Returns: + List of branches with pagination info + """ + ttl = ttl or DEFAULT_TOKEN_TTL_SECONDS + jwt = self.generate_jwt(self._id, {"permissions": ["git:read"], "ttl": ttl}) + + params = {} + if cursor: + params["cursor"] = cursor + if limit is not None: + params["limit"] = str(limit) + + url = f"{self.api_base_url}/api/v{self.api_version}/repos/branches" + if params: + url += f"?{urlencode(params)}" + + async with httpx.AsyncClient() as client: + response = await client.get( + url, + headers={ + "Authorization": f"Bearer {jwt}", + "Code-Storage-Agent": get_user_agent(), + }, + timeout=30.0, + ) + response.raise_for_status() + data = response.json() + + branches: List[BranchInfo] = [ + { + "cursor": b["cursor"], + "name": b["name"], + "head_sha": b["head_sha"], + "created_at": b["created_at"], + } + for b in data["branches"] + ] + + return { + "branches": branches, + "next_cursor": data.get("next_cursor"), + "has_more": data["has_more"], + } + + async def create_branch( + self, + *, + base_branch: str, + target_branch: str, + base_is_ephemeral: bool = False, + target_is_ephemeral: bool = False, + ttl: Optional[int] = None, + ) -> CreateBranchResult: + """Create or promote a branch.""" + base_branch_clean = base_branch.strip() + target_branch_clean = target_branch.strip() + + if not base_branch_clean: + raise ValueError("create_branch base_branch is required") + if not target_branch_clean: + raise ValueError("create_branch target_branch is required") + + ttl_value = resolve_invocation_ttl_seconds({"ttl": ttl} if ttl is not None else None) + jwt = self.generate_jwt( + self._id, + {"permissions": ["git:write"], "ttl": ttl_value}, + ) + + payload: Dict[str, Any] = { + "base_branch": base_branch_clean, + "target_branch": target_branch_clean, + "base_is_ephemeral": bool(base_is_ephemeral), + "target_is_ephemeral": bool(target_is_ephemeral), + } + + url = 
f"{self.api_base_url}/api/v{self.api_version}/repos/branches/create" + + async with httpx.AsyncClient() as client: + response = await client.post( + url, + headers={ + "Authorization": f"Bearer {jwt}", + "Content-Type": "application/json", + "Code-Storage-Agent": get_user_agent(), + }, + json=payload, + timeout=180.0, + ) + + if response.status_code != 200: + message = "Create branch failed" + try: + error_data = response.json() + if isinstance(error_data, dict) and error_data.get("message"): + message = str(error_data["message"]) + else: + message = f"{message} with HTTP {response.status_code}" + except Exception: + message = f"{message} with HTTP {response.status_code}" + raise ApiError(message, status_code=response.status_code, response=response) + + data = response.json() + + result: CreateBranchResult = { + "message": data.get("message", "branch created"), + "target_branch": data["target_branch"], + "target_is_ephemeral": data.get("target_is_ephemeral", False), + } + commit_sha = data.get("commit_sha") + if commit_sha: + result["commit_sha"] = commit_sha + return result + + async def promote_ephemeral_branch( + self, + *, + base_branch: Optional[str] = None, + target_branch: Optional[str] = None, + ttl: Optional[int] = None, + ) -> CreateBranchResult: + """Promote an ephemeral branch to a persistent target branch.""" + if base_branch is None: + raise ValueError("promote_ephemeral_branch base_branch is required") + + base_clean = base_branch.strip() + if not base_clean: + raise ValueError("promote_ephemeral_branch base_branch is required") + + target_clean = target_branch.strip() if target_branch is not None else base_clean + if not target_clean: + raise ValueError("promote_ephemeral_branch target_branch is required") + + return await self.create_branch( + base_branch=base_clean, + target_branch=target_clean, + base_is_ephemeral=True, + target_is_ephemeral=False, + ttl=ttl, + ) + + async def list_commits( + self, + *, + branch: Optional[str] = None, + cursor: 
Optional[str] = None, + limit: Optional[int] = None, + ttl: Optional[int] = None, + ) -> ListCommitsResult: + """List commits in repository. + + Args: + branch: Branch name to list commits from + cursor: Pagination cursor + limit: Maximum number of commits to return + ttl: Token TTL in seconds + + Returns: + List of commits with pagination info + """ + ttl = ttl or DEFAULT_TOKEN_TTL_SECONDS + jwt = self.generate_jwt(self._id, {"permissions": ["git:read"], "ttl": ttl}) + + params = {} + if branch: + params["branch"] = branch + if cursor: + params["cursor"] = cursor + if limit is not None: + params["limit"] = str(limit) + + url = f"{self.api_base_url}/api/v{self.api_version}/repos/commits" + if params: + url += f"?{urlencode(params)}" + + async with httpx.AsyncClient() as client: + response = await client.get( + url, + headers={ + "Authorization": f"Bearer {jwt}", + "Code-Storage-Agent": get_user_agent(), + }, + timeout=30.0, + ) + response.raise_for_status() + data = response.json() + + commits: List[CommitInfo] = [] + for c in data["commits"]: + date = datetime.fromisoformat(c["date"].replace("Z", "+00:00")) + commits.append( + { + "sha": c["sha"], + "message": c["message"], + "author_name": c["author_name"], + "author_email": c["author_email"], + "committer_name": c["committer_name"], + "committer_email": c["committer_email"], + "date": date, + "raw_date": c["date"], + } + ) + + return { + "commits": commits, + "next_cursor": data.get("next_cursor"), + "has_more": data["has_more"], + } + + async def get_note( + self, + *, + sha: str, + ttl: Optional[int] = None, + ) -> NoteReadResult: + """Read a git note.""" + sha_clean = sha.strip() + if not sha_clean: + raise ValueError("get_note sha is required") + + ttl = ttl or DEFAULT_TOKEN_TTL_SECONDS + jwt = self.generate_jwt(self._id, {"permissions": ["git:read"], "ttl": ttl}) + + url = f"{self.api_base_url}/api/v{self.api_version}/repos/notes?{urlencode({'sha': sha_clean})}" + + async with httpx.AsyncClient() as client: 
+ response = await client.get( + url, + headers={ + "Authorization": f"Bearer {jwt}", + "Code-Storage-Agent": get_user_agent(), + }, + timeout=30.0, + ) + response.raise_for_status() + data = response.json() + return { + "sha": data["sha"], + "note": data["note"], + "ref_sha": data["ref_sha"], + } + + async def create_note( + self, + *, + sha: str, + note: str, + expected_ref_sha: Optional[str] = None, + author: Optional[CommitSignature] = None, + ttl: Optional[int] = None, + ) -> NoteWriteResult: + """Create a git note.""" + return await self._write_note( + action_label="create_note", + action="add", + sha=sha, + note=note, + expected_ref_sha=expected_ref_sha, + author=author, + ttl=ttl, + ) + + async def append_note( + self, + *, + sha: str, + note: str, + expected_ref_sha: Optional[str] = None, + author: Optional[CommitSignature] = None, + ttl: Optional[int] = None, + ) -> NoteWriteResult: + """Append to a git note.""" + return await self._write_note( + action_label="append_note", + action="append", + sha=sha, + note=note, + expected_ref_sha=expected_ref_sha, + author=author, + ttl=ttl, + ) + + async def delete_note( + self, + *, + sha: str, + expected_ref_sha: Optional[str] = None, + author: Optional[CommitSignature] = None, + ttl: Optional[int] = None, + ) -> NoteWriteResult: + """Delete a git note.""" + sha_clean = sha.strip() + if not sha_clean: + raise ValueError("delete_note sha is required") + + ttl = ttl or DEFAULT_TOKEN_TTL_SECONDS + jwt = self.generate_jwt(self._id, {"permissions": ["git:write"], "ttl": ttl}) + + payload: Dict[str, Any] = {"sha": sha_clean} + if expected_ref_sha and expected_ref_sha.strip(): + payload["expected_ref_sha"] = expected_ref_sha.strip() + if author: + author_name = author.get("name", "").strip() + author_email = author.get("email", "").strip() + if not author_name or not author_email: + raise ValueError("delete_note author name and email are required when provided") + payload["author"] = {"name": author_name, "email": 
author_email} + + url = f"{self.api_base_url}/api/v{self.api_version}/repos/notes" + + async with httpx.AsyncClient() as client: + response = await client.request( + "DELETE", + url, + headers={ + "Authorization": f"Bearer {jwt}", + "Content-Type": "application/json", + "Code-Storage-Agent": get_user_agent(), + }, + json=payload, + timeout=30.0, + ) + + return self._parse_note_write_response(response, "delete_note") + + async def get_branch_diff( + self, + *, + branch: str, + base: Optional[str] = None, + ephemeral: Optional[bool] = None, + ephemeral_base: Optional[bool] = None, + paths: Optional[list[str]] = None, + ttl: Optional[int] = None, + ) -> GetBranchDiffResult: + """Get diff between branches. + + Args: + branch: Target branch name + base: Base branch name (for comparison) + ephemeral: Whether to resolve the branch under the ephemeral namespace + ephemeral_base: Whether to resolve the base branch under the ephemeral namespace + paths: Optional paths to filter the diff to specific files + ttl: Token TTL in seconds + + Returns: + Branch diff with stats and file changes + """ + ttl = ttl or DEFAULT_TOKEN_TTL_SECONDS + jwt = self.generate_jwt(self._id, {"permissions": ["git:read"], "ttl": ttl}) + + params: list[tuple[str, str]] = [("branch", branch)] + if base: + params.append(("base", base)) + if ephemeral is not None: + params.append(("ephemeral", "true" if ephemeral else "false")) + if ephemeral_base is not None: + params.append(("ephemeral_base", "true" if ephemeral_base else "false")) + if paths: + for p in paths: + params.append(("path", p)) + + url = f"{self.api_base_url}/api/v{self.api_version}/repos/branches/diff" + url += f"?{urlencode(params)}" + + async with httpx.AsyncClient() as client: + response = await client.get( + url, + headers={ + "Authorization": f"Bearer {jwt}", + "Code-Storage-Agent": get_user_agent(), + }, + timeout=60.0, + ) + response.raise_for_status() + data = response.json() + + files: List[FileDiff] = [] + for f in data["files"]: 
+ files.append( + { + "path": f["path"], + "state": normalize_diff_state(f["state"]), + "raw_state": f["state"], + "old_path": f.get("old_path"), + "raw": f["raw"], + "bytes": f["bytes"], + "is_eof": f["is_eof"], + } + ) + + filtered_files: List[FilteredFile] = [] + for f in data.get("filtered_files", []): + filtered_files.append( + { + "path": f["path"], + "state": normalize_diff_state(f["state"]), + "raw_state": f["state"], + "old_path": f.get("old_path"), + "bytes": f["bytes"], + "is_eof": f["is_eof"], + } + ) + + return { + "branch": data["branch"], + "base": data["base"], + "stats": data["stats"], + "files": files, + "filtered_files": filtered_files, + } + + async def get_commit_diff( + self, + *, + sha: str, + base_sha: Optional[str] = None, + paths: Optional[list[str]] = None, + ttl: Optional[int] = None, + ) -> GetCommitDiffResult: + """Get diff for a specific commit. + + Args: + sha: Commit SHA + base_sha: Optional base commit SHA to compare against + paths: Optional paths to filter the diff to specific files + ttl: Token TTL in seconds + + Returns: + Commit diff with stats and file changes + """ + ttl = ttl or DEFAULT_TOKEN_TTL_SECONDS + jwt = self.generate_jwt(self._id, {"permissions": ["git:read"], "ttl": ttl}) + + params: list[tuple[str, str]] = [("sha", sha)] + if base_sha: + params.append(("baseSha", base_sha)) + if paths: + for p in paths: + params.append(("path", p)) + url = f"{self.api_base_url}/api/v{self.api_version}/repos/diff" + url += f"?{urlencode(params)}" + + async with httpx.AsyncClient() as client: + response = await client.get( + url, + headers={ + "Authorization": f"Bearer {jwt}", + "Code-Storage-Agent": get_user_agent(), + }, + timeout=60.0, + ) + response.raise_for_status() + data = response.json() + + files: List[FileDiff] = [] + for f in data["files"]: + files.append( + { + "path": f["path"], + "state": normalize_diff_state(f["state"]), + "raw_state": f["state"], + "old_path": f.get("old_path"), + "raw": f["raw"], + "bytes": 
f["bytes"], + "is_eof": f["is_eof"], + } + ) + + filtered_files: List[FilteredFile] = [] + for f in data.get("filtered_files", []): + filtered_files.append( + { + "path": f["path"], + "state": normalize_diff_state(f["state"]), + "raw_state": f["state"], + "old_path": f.get("old_path"), + "bytes": f["bytes"], + "is_eof": f["is_eof"], + } + ) + + return { + "sha": data["sha"], + "stats": data["stats"], + "files": files, + "filtered_files": filtered_files, + } + + async def grep( + self, + *, + pattern: str, + ref: Optional[str] = None, + paths: Optional[list[str]] = None, + case_sensitive: Optional[bool] = None, + file_filters: Optional[Dict[str, Any]] = None, + context: Optional[Dict[str, Any]] = None, + limits: Optional[Dict[str, Any]] = None, + pagination: Optional[Dict[str, Any]] = None, + ttl: Optional[int] = None, + ) -> GrepResult: + """Run grep against the repository. + + Args: + pattern: Regex pattern to search for + ref: Git ref to search (defaults to server-side default branch) + paths: Git pathspecs to restrict search + case_sensitive: Whether search is case-sensitive (default: server default) + file_filters: Optional filters with include_globs/exclude_globs/extension_filters + context: Optional context with before/after + limits: Optional limits with max_lines/max_matches_per_file + pagination: Optional pagination with cursor/limit + ttl: Token TTL in seconds + + Returns: + Grep results with matches, pagination info, and query metadata + """ + pattern_clean = pattern.strip() + if not pattern_clean: + raise ValueError("grep pattern is required") + + ttl_value = ttl or DEFAULT_TOKEN_TTL_SECONDS + jwt = self.generate_jwt(self._id, {"permissions": ["git:read"], "ttl": ttl_value}) + + body: Dict[str, Any] = { + "query": { + "pattern": pattern_clean, + } + } + + if case_sensitive is not None: + body["query"]["case_sensitive"] = bool(case_sensitive) + if ref: + body["rev"] = ref + if paths: + body["paths"] = paths + if file_filters: + body["file_filters"] = 
file_filters + if context: + body["context"] = context + if limits: + body["limits"] = limits + if pagination: + body["pagination"] = pagination + + url = f"{self.api_base_url}/api/v{self.api_version}/repos/grep" + + async with httpx.AsyncClient() as client: + response = await client.post( + url, + headers={ + "Authorization": f"Bearer {jwt}", + "Content-Type": "application/json", + "Code-Storage-Agent": get_user_agent(), + }, + json=body, + timeout=60.0, + ) + response.raise_for_status() + data = response.json() + + matches: List[GrepFileMatch] = [] + for match in data.get("matches", []): + lines: List[GrepLine] = [] + for line in match.get("lines", []): + lines.append( + { + "line_number": int(line["line_number"]), + "text": line["text"], + "type": line["type"], + } + ) + matches.append({"path": match["path"], "lines": lines}) + + result: GrepResult = { + "query": data["query"], + "repo": data["repo"], + "matches": matches, + "has_more": bool(data["has_more"]), + "next_cursor": data.get("next_cursor"), + } + return result + + async def pull_upstream( + self, + *, + ref: Optional[str] = None, + ttl: Optional[int] = None, + ) -> None: + """Pull from upstream repository. 
+ + Args: + ref: Git ref to pull + ttl: Token TTL in seconds + + Raises: + ApiError: If pull fails + """ + ttl = ttl or DEFAULT_TOKEN_TTL_SECONDS + jwt = self.generate_jwt(self._id, {"permissions": ["git:write"], "ttl": ttl}) + + body = {} + if ref: + body["ref"] = ref + + url = f"{self.api_base_url}/api/v{self.api_version}/repos/pull-upstream" + + async with httpx.AsyncClient() as client: + response = await client.post( + url, + headers={ + "Authorization": f"Bearer {jwt}", + "Content-Type": "application/json", + "Code-Storage-Agent": get_user_agent(), + }, + json=body, + timeout=30.0, + ) + + if response.status_code != 202: + text = await response.aread() + raise Exception(f"Pull Upstream failed: {response.status_code} {text.decode()}") + + async def restore_commit( + self, + *, + target_branch: str, + target_commit_sha: str, + author: CommitSignature, + commit_message: Optional[str] = None, + expected_head_sha: Optional[str] = None, + committer: Optional[CommitSignature] = None, + ttl: Optional[int] = None, + ) -> RestoreCommitResult: + """Restore a previous commit. 
+ + Args: + target_branch: Target branch name + target_commit_sha: Commit SHA to restore + author: Author signature (name and email) + commit_message: Optional commit message + expected_head_sha: Expected HEAD SHA for optimistic locking + committer: Optional committer signature (name and email) + ttl: Token TTL in seconds + + Returns: + Restore result with commit info + + Raises: + ValueError: If required options are missing + RefUpdateError: If restore fails + """ + target_branch = target_branch.strip() + if not target_branch: + raise ValueError("restoreCommit target_branch is required") + if target_branch.startswith("refs/"): + raise ValueError("restoreCommit target_branch must not include refs/ prefix") + + target_commit_sha = target_commit_sha.strip() + if not target_commit_sha: + raise ValueError("restoreCommit target_commit_sha is required") + + author_name = author.get("name", "").strip() + author_email = author.get("email", "").strip() + if not author_name or not author_email: + raise ValueError("restoreCommit author name and email are required") + + ttl = ttl or resolve_commit_ttl_seconds(None) + jwt = self.generate_jwt(self._id, {"permissions": ["git:write"], "ttl": ttl}) + + metadata: Dict[str, Any] = { + "target_branch": target_branch, + "target_commit_sha": target_commit_sha, + "author": {"name": author_name, "email": author_email}, + } + + if commit_message: + metadata["commit_message"] = commit_message.strip() + + if expected_head_sha: + metadata["expected_head_sha"] = expected_head_sha.strip() + + if committer: + committer_name = committer.get("name", "").strip() + committer_email = committer.get("email", "").strip() + if not committer_name or not committer_email: + raise ValueError( + "restoreCommit committer name and email are required when provided" + ) + metadata["committer"] = {"name": committer_name, "email": committer_email} + + url = f"{self.api_base_url}/api/v{self.api_version}/repos/restore-commit" + + async with httpx.AsyncClient() as 
client: + response = await client.post( + url, + headers={ + "Authorization": f"Bearer {jwt}", + "Content-Type": "application/json", + "Code-Storage-Agent": get_user_agent(), + }, + json={"metadata": metadata}, + timeout=180.0, + ) + + # Try to parse JSON response, fallback to text for 5xx errors + try: + payload = response.json() + except Exception as exc: + # If JSON parsing fails (e.g., CDN HTML response on 5xx), use text + status = infer_ref_update_reason(str(response.status_code)) + text = await response.aread() + message = f"Restore commit failed with HTTP {response.status_code}" + if response.reason_phrase: + message += f" {response.reason_phrase}" + # Include response body for debugging + if text: + message += f": {text.decode('utf-8', errors='replace')[:200]}" + raise RefUpdateError(message, status=status) from exc + + # Check if we got a result block (with or without commit) + if "result" in payload: + result = payload["result"] + ref_update = self._to_ref_update(result) + + # Check if the operation succeeded + if not result.get("success"): + # Failure - raise with server message and ref_update + raise RefUpdateError( + result.get( + "message", f"Restore commit failed with status {result.get('status')}" + ), + status=result.get("status"), + ref_update=ref_update, + ) + + # Success - must have commit field + if "commit" not in payload: + raise RefUpdateError( + "Restore commit succeeded but server did not return commit details", + status="unknown", + ) + + commit = payload["commit"] + return { + "commit_sha": commit["commit_sha"], + "tree_sha": commit["tree_sha"], + "target_branch": commit["target_branch"], + "pack_bytes": commit["pack_bytes"], + "ref_update": ref_update, + } + + # No result block - handle as generic error + status = infer_ref_update_reason(str(response.status_code)) + message = f"Restore commit failed with HTTP {response.status_code}" + if response.reason_phrase: + message += f" {response.reason_phrase}" + + raise RefUpdateError(message, 
status=status) + + def create_commit( + self, + *, + target_branch: str, + commit_message: str, + author: CommitSignature, + expected_head_sha: Optional[str] = None, + base_branch: Optional[str] = None, + ephemeral: Optional[bool] = None, + ephemeral_base: Optional[bool] = None, + committer: Optional[CommitSignature] = None, + ttl: Optional[int] = None, + ) -> CommitBuilder: + """Create a new commit builder. + + Args: + target_branch: Target branch name + commit_message: Commit message + author: Author signature (name and email) + expected_head_sha: Expected HEAD SHA for optimistic locking + base_branch: Base branch to branch off from + ephemeral: Whether to mark the target branch as ephemeral + ephemeral_base: Whether the base branch is ephemeral + committer: Optional committer signature (name and email) + ttl: Token TTL in seconds + + Returns: + Commit builder for fluent API + """ + options: CreateCommitOptions = { + "target_branch": target_branch, + "commit_message": commit_message, + "author": author, + } + if expected_head_sha: + options["expected_head_sha"] = expected_head_sha + if base_branch: + options["base_branch"] = base_branch + if ephemeral is not None: + options["ephemeral"] = bool(ephemeral) + if ephemeral_base is not None: + options["ephemeral_base"] = bool(ephemeral_base) + if committer: + options["committer"] = committer + + ttl = ttl or resolve_commit_ttl_seconds(None) + options["ttl"] = ttl + + def get_auth_token() -> str: + return self.generate_jwt( + self._id, + {"permissions": ["git:write"], "ttl": ttl}, + ) + + return CommitBuilderImpl( + options, + get_auth_token, + self.api_base_url, + self.api_version, + ) + + async def create_commit_from_diff( + self, + *, + target_branch: str, + commit_message: str, + diff: FileSource, + author: CommitSignature, + expected_head_sha: Optional[str] = None, + base_branch: Optional[str] = None, + ephemeral: Optional[bool] = None, + ephemeral_base: Optional[bool] = None, + committer: 
Optional[CommitSignature] = None, + ttl: Optional[int] = None, + ) -> CommitResult: + """Create a commit by applying a unified diff.""" + if diff is None: + raise ValueError("createCommitFromDiff diff is required") + + options: CreateCommitOptions = { + "target_branch": target_branch, + "commit_message": commit_message, + "author": author, + } + if expected_head_sha: + options["expected_head_sha"] = expected_head_sha + if base_branch: + options["base_branch"] = base_branch + if ephemeral is not None: + options["ephemeral"] = bool(ephemeral) + if ephemeral_base is not None: + options["ephemeral_base"] = bool(ephemeral_base) + if committer: + options["committer"] = committer + + ttl_value = ttl or resolve_commit_ttl_seconds(None) + options["ttl"] = ttl_value + + def get_auth_token() -> str: + return self.generate_jwt( + self._id, + {"permissions": ["git:write"], "ttl": ttl_value}, + ) + + return await send_diff_commit_request( + options, + diff, + get_auth_token, + self.api_base_url, + self.api_version, + ) + + async def _write_note( + self, + *, + action_label: str, + action: str, + sha: str, + note: str, + expected_ref_sha: Optional[str], + author: Optional[CommitSignature], + ttl: Optional[int], + ) -> NoteWriteResult: + sha_clean = sha.strip() + if not sha_clean: + raise ValueError(f"{action_label} sha is required") + + note_clean = note.strip() + if not note_clean: + raise ValueError(f"{action_label} note is required") + + ttl = ttl or DEFAULT_TOKEN_TTL_SECONDS + jwt = self.generate_jwt(self._id, {"permissions": ["git:write"], "ttl": ttl}) + + payload: Dict[str, Any] = { + "sha": sha_clean, + "action": action, + "note": note_clean, + } + if expected_ref_sha and expected_ref_sha.strip(): + payload["expected_ref_sha"] = expected_ref_sha.strip() + if author: + author_name = author.get("name", "").strip() + author_email = author.get("email", "").strip() + if not author_name or not author_email: + raise ValueError(f"{action_label} author name and email are required 
when provided") + payload["author"] = {"name": author_name, "email": author_email} + + url = f"{self.api_base_url}/api/v{self.api_version}/repos/notes" + + async with httpx.AsyncClient() as client: + response = await client.post( + url, + headers={ + "Authorization": f"Bearer {jwt}", + "Content-Type": "application/json", + "Code-Storage-Agent": get_user_agent(), + }, + json=payload, + timeout=30.0, + ) + + return self._parse_note_write_response(response, action_label) + + def _parse_note_write_response( + self, + response: httpx.Response, + action_label: str, + ) -> NoteWriteResult: + try: + payload = response.json() + except Exception as exc: + message = f"{action_label} failed with HTTP {response.status_code}" + if response.reason_phrase: + message += f" {response.reason_phrase}" + try: + body_text = response.text + except Exception: + body_text = "" + if body_text: + message += f": {body_text[:200]}" + raise ApiError(message, status_code=response.status_code, response=response) from exc + + if isinstance(payload, dict) and "error" in payload: + raise ApiError( + str(payload.get("error")), + status_code=response.status_code, + response=response, + ) + + if not isinstance(payload, dict) or "result" not in payload: + message = f"{action_label} failed with HTTP {response.status_code}" + if response.reason_phrase: + message += f" {response.reason_phrase}" + raise ApiError(message, status_code=response.status_code, response=response) + + result = payload.get("result", {}) + note_result: NoteWriteResult = { + "sha": payload.get("sha", ""), + "target_ref": payload.get("target_ref", ""), + "new_ref_sha": payload.get("new_ref_sha", ""), + "result": { + "success": bool(result.get("success")), + "status": str(result.get("status", "")), + }, + } + + base_commit = payload.get("base_commit") + if isinstance(base_commit, str) and base_commit: + note_result["base_commit"] = base_commit + if result.get("message"): + note_result["result"]["message"] = result.get("message") + + if 
not result.get("success"): + raise RefUpdateError( + result.get( + "message", + f"{action_label} failed with status {result.get('status')}", + ), + status=result.get("status"), + ref_update={ + "branch": payload.get("target_ref", ""), + "old_sha": payload.get("base_commit", ""), + "new_sha": payload.get("new_ref_sha", ""), + }, + ) + + return note_result + + def _to_ref_update(self, result: Dict[str, Any]) -> RefUpdate: + """Convert result to ref update.""" + return { + "branch": result.get("branch", ""), + "old_sha": result.get("old_sha", ""), + "new_sha": result.get("new_sha", ""), + } diff --git a/packages/git-storage-sdk-python/pierre_storage/types.py b/packages/git-storage-sdk-python/pierre_storage/types.py new file mode 100644 index 000000000..039d53d2e --- /dev/null +++ b/packages/git-storage-sdk-python/pierre_storage/types.py @@ -0,0 +1,670 @@ +"""Type definitions for Pierre Git Storage SDK.""" + +from datetime import datetime +from enum import Enum +from typing import Any, AsyncIterator, Dict, Iterable, List, Literal, Optional, Protocol, Union + +from typing_extensions import NotRequired, TypedDict + + +class DiffFileState(str, Enum): + """File state in a diff.""" + + ADDED = "added" + MODIFIED = "modified" + DELETED = "deleted" + RENAMED = "renamed" + COPIED = "copied" + TYPE_CHANGED = "type_changed" + UNMERGED = "unmerged" + UNKNOWN = "unknown" + + +class GitFileMode(str, Enum): + """Git file modes.""" + + REGULAR = "100644" + EXECUTABLE = "100755" + SYMLINK = "120000" + SUBMODULE = "160000" + + +# Configuration types +class GitStorageOptions(TypedDict, total=False): + """Options for GitStorage client.""" + + name: str # required + key: str # required + api_base_url: Optional[str] + storage_base_url: Optional[str] + api_version: Optional[int] + default_ttl: Optional[int] + + +class GitHubBaseRepo(TypedDict, total=False): + """Base repository configuration for GitHub sync.""" + + provider: Literal["github"] # required + owner: str # required + name: str # 
required + default_branch: Optional[str] + + +class ForkBaseRepo(TypedDict, total=False): + """Base repository configuration for code storage forks.""" + + id: str # required + ref: Optional[str] + sha: Optional[str] + + +BaseRepo = Union[GitHubBaseRepo, ForkBaseRepo] + + +class DeleteRepoResult(TypedDict): + """Result from deleting a repository.""" + + repo_id: str + message: str + + +# Repository list types +class RepoBaseInfo(TypedDict): + """Base repository info for listed repositories.""" + + provider: str + owner: str + name: str + + +class RepoInfo(TypedDict, total=False): + """Repository info in list responses.""" + + repo_id: str + url: str + default_branch: str + created_at: str + base_repo: NotRequired[RepoBaseInfo] + + +class ListReposResult(TypedDict): + """Result from listing repositories.""" + + repos: List[RepoInfo] + next_cursor: Optional[str] + has_more: bool + + +# Removed: GetRemoteURLOptions - now uses **kwargs +# Removed: CreateRepoOptions - now uses **kwargs +# Removed: FindOneOptions - now uses **kwargs + + +# File and branch types +# Removed: GetFileOptions - now uses **kwargs +# Removed: ListFilesOptions - now uses **kwargs + + +class ListFilesResult(TypedDict): + """Result from listing files.""" + + paths: List[str] + ref: str + + +# Removed: ListBranchesOptions - now uses **kwargs + + +class BranchInfo(TypedDict): + """Information about a branch.""" + + cursor: str + name: str + head_sha: str + created_at: str + + +class ListBranchesResult(TypedDict): + """Result from listing branches.""" + + branches: List[BranchInfo] + next_cursor: Optional[str] + has_more: bool + + +class CreateBranchResult(TypedDict): + """Result from creating a branch.""" + + message: str + target_branch: str + target_is_ephemeral: bool + commit_sha: NotRequired[str] + + +# Removed: ListCommitsOptions - now uses **kwargs + + +class CommitInfo(TypedDict): + """Information about a commit.""" + + sha: str + message: str + author_name: str + author_email: str + 
committer_name: str + committer_email: str + date: datetime + raw_date: str + + +class ListCommitsResult(TypedDict): + """Result from listing commits.""" + + commits: List[CommitInfo] + next_cursor: Optional[str] + has_more: bool + + +# Git notes types +class NoteReadResult(TypedDict): + """Result from reading a git note.""" + + sha: str + note: str + ref_sha: str + + +class NoteWriteResultPayload(TypedDict): + """Result payload for note writes.""" + + success: bool + status: str + message: NotRequired[str] + + +class NoteWriteResult(TypedDict): + """Result from writing a git note.""" + + sha: str + target_ref: str + base_commit: NotRequired[str] + new_ref_sha: str + result: NoteWriteResultPayload + + +# Diff types +class DiffStats(TypedDict): + """Statistics about a diff.""" + + files: int + additions: int + deletions: int + changes: int + + +class FileDiff(TypedDict): + """A file diff entry.""" + + path: str + state: DiffFileState + raw_state: str + old_path: Optional[str] + raw: str + bytes: int + is_eof: bool + + +class FilteredFile(TypedDict): + """A filtered file entry.""" + + path: str + state: DiffFileState + raw_state: str + old_path: Optional[str] + bytes: int + is_eof: bool + + +# Removed: GetBranchDiffOptions - now uses **kwargs + + +class GetBranchDiffResult(TypedDict): + """Result from getting branch diff.""" + + branch: str + base: str + stats: DiffStats + files: List[FileDiff] + filtered_files: List[FilteredFile] + + +# Removed: GetCommitDiffOptions - now uses **kwargs + + +class GetCommitDiffResult(TypedDict): + """Result from getting commit diff.""" + + sha: str + stats: DiffStats + files: List[FileDiff] + filtered_files: List[FilteredFile] + + +# Grep types +class GrepLine(TypedDict): + """A single line in grep results.""" + + line_number: int + text: str + type: str + + +class GrepFileMatch(TypedDict): + """A file with grep matches.""" + + path: str + lines: List[GrepLine] + + +class GrepQueryResult(TypedDict): + """Query information for grep 
results.""" + + pattern: str + case_sensitive: bool + + +class GrepRepoInfo(TypedDict): + """Repository information for grep results.""" + + ref: str + commit: str + + +class GrepResult(TypedDict): + """Result from running grep.""" + + query: GrepQueryResult + repo: GrepRepoInfo + matches: List[GrepFileMatch] + next_cursor: NotRequired[Optional[str]] + has_more: bool + + +# Commit types +class CommitSignature(TypedDict): + """Git commit signature.""" + + name: str + email: str + + +class CreateCommitOptions(TypedDict, total=False): + """Options for creating a commit.""" + + target_branch: str # required + commit_message: str # required + author: CommitSignature # required + expected_head_sha: Optional[str] + base_branch: Optional[str] + ephemeral: bool + ephemeral_base: bool + committer: Optional[CommitSignature] + ttl: int + + +# Removed: CommitFileOptions - now uses **kwargs with explicit mode parameter + + +class RefUpdate(TypedDict): + """Information about a ref update.""" + + branch: str + old_sha: str + new_sha: str + + +class CommitResult(TypedDict): + """Result from creating a commit.""" + + commit_sha: str + tree_sha: str + target_branch: str + pack_bytes: int + blob_count: int + ref_update: RefUpdate + + +# Removed: RestoreCommitOptions - now uses **kwargs + + +class RestoreCommitResult(TypedDict): + """Result from restoring a commit.""" + + commit_sha: str + tree_sha: str + target_branch: str + pack_bytes: int + ref_update: RefUpdate + + +# Removed: PullUpstreamOptions - now uses **kwargs + + +# File source types for commits +FileSource = Union[ + str, + bytes, + bytearray, + memoryview, + Iterable[bytes], + AsyncIterator[bytes], +] + + +# Protocol for commit builder +class CommitBuilder(Protocol): + """Protocol for commit builder.""" + + def add_file( + self, + path: str, + source: FileSource, + *, + mode: Optional[GitFileMode] = None, + ) -> "CommitBuilder": + """Add a file to the commit.""" + ... 
+ + def add_file_from_string( + self, + path: str, + contents: str, + *, + encoding: str = "utf-8", + mode: Optional[GitFileMode] = None, + ) -> "CommitBuilder": + """Add a file from a string.""" + ... + + def delete_path(self, path: str) -> "CommitBuilder": + """Delete a path from the commit.""" + ... + + async def send(self) -> CommitResult: + """Send the commit to the server.""" + ... + + +# Protocol for repository +class Repo(Protocol): + """Protocol for repository.""" + + @property + def id(self) -> str: + """Get the repository ID.""" + ... + + @property + def default_branch(self) -> str: + """Get the default branch name.""" + ... + + async def get_remote_url( + self, + *, + permissions: Optional[list[str]] = None, + ttl: Optional[int] = None, + ) -> str: + """Get the remote URL for the repository.""" + ... + + async def get_file_stream( + self, + *, + path: str, + ref: Optional[str] = None, + ephemeral: Optional[bool] = None, + ttl: Optional[int] = None, + ) -> Any: # httpx.Response + """Get a file as a stream.""" + ... + + async def list_files( + self, + *, + ref: Optional[str] = None, + ephemeral: Optional[bool] = None, + ttl: Optional[int] = None, + ) -> ListFilesResult: + """List files in the repository.""" + ... + + async def list_branches( + self, + *, + cursor: Optional[str] = None, + limit: Optional[int] = None, + ttl: Optional[int] = None, + ) -> ListBranchesResult: + """List branches in the repository.""" + ... + + async def create_branch( + self, + *, + base_branch: str, + target_branch: str, + base_is_ephemeral: bool = False, + target_is_ephemeral: bool = False, + ttl: Optional[int] = None, + ) -> CreateBranchResult: + """Create or promote a branch.""" + ... + + async def promote_ephemeral_branch( + self, + *, + base_branch: str, + target_branch: Optional[str] = None, + ttl: Optional[int] = None, + ) -> CreateBranchResult: + """Promote an ephemeral branch to a persistent target branch.""" + ... 
+ + async def list_commits( + self, + *, + branch: Optional[str] = None, + cursor: Optional[str] = None, + limit: Optional[int] = None, + ttl: Optional[int] = None, + ) -> ListCommitsResult: + """List commits in the repository.""" + ... + + async def get_note( + self, + *, + sha: str, + ttl: Optional[int] = None, + ) -> NoteReadResult: + """Read a git note.""" + ... + + async def create_note( + self, + *, + sha: str, + note: str, + expected_ref_sha: Optional[str] = None, + author: Optional["CommitSignature"] = None, + ttl: Optional[int] = None, + ) -> NoteWriteResult: + """Create a git note.""" + ... + + async def append_note( + self, + *, + sha: str, + note: str, + expected_ref_sha: Optional[str] = None, + author: Optional["CommitSignature"] = None, + ttl: Optional[int] = None, + ) -> NoteWriteResult: + """Append to a git note.""" + ... + + async def delete_note( + self, + *, + sha: str, + expected_ref_sha: Optional[str] = None, + author: Optional["CommitSignature"] = None, + ttl: Optional[int] = None, + ) -> NoteWriteResult: + """Delete a git note.""" + ... + + async def get_branch_diff( + self, + *, + branch: str, + base: Optional[str] = None, + ephemeral: Optional[bool] = None, + ephemeral_base: Optional[bool] = None, + paths: Optional[list[str]] = None, + ttl: Optional[int] = None, + ) -> GetBranchDiffResult: + """Get diff between branches.""" + ... + + async def get_commit_diff( + self, + *, + sha: str, + base_sha: Optional[str] = None, + paths: Optional[list[str]] = None, + ttl: Optional[int] = None, + ) -> GetCommitDiffResult: + """Get diff for a commit.""" + ... 
+ + async def grep( + self, + *, + pattern: str, + ref: Optional[str] = None, + paths: Optional[list[str]] = None, + case_sensitive: Optional[bool] = None, + file_filters: Optional[Dict[str, Any]] = None, + context: Optional[Dict[str, Any]] = None, + limits: Optional[Dict[str, Any]] = None, + pagination: Optional[Dict[str, Any]] = None, + ttl: Optional[int] = None, + ) -> "GrepResult": + """Run grep against the repository.""" + ... + + async def pull_upstream( + self, + *, + ref: Optional[str] = None, + ttl: Optional[int] = None, + ) -> None: + """Pull from upstream repository.""" + ... + + async def restore_commit( + self, + *, + target_branch: str, + target_commit_sha: str, + author: CommitSignature, + commit_message: Optional[str] = None, + expected_head_sha: Optional[str] = None, + committer: Optional[CommitSignature] = None, + ttl: Optional[int] = None, + ) -> RestoreCommitResult: + """Restore a previous commit.""" + ... + + def create_commit( + self, + *, + target_branch: str, + commit_message: str, + author: CommitSignature, + expected_head_sha: Optional[str] = None, + base_branch: Optional[str] = None, + ephemeral: Optional[bool] = None, + ephemeral_base: Optional[bool] = None, + committer: Optional[CommitSignature] = None, + ttl: Optional[int] = None, + ) -> CommitBuilder: + """Create a new commit builder.""" + ... + + async def create_commit_from_diff( + self, + *, + target_branch: str, + commit_message: str, + diff: FileSource, + author: CommitSignature, + expected_head_sha: Optional[str] = None, + base_branch: Optional[str] = None, + ephemeral: Optional[bool] = None, + ephemeral_base: Optional[bool] = None, + committer: Optional[CommitSignature] = None, + ttl: Optional[int] = None, + ) -> CommitResult: + """Create a commit by applying a diff.""" + ... 
+ + +# Webhook types +class WebhookValidationOptions(TypedDict, total=False): + """Options for webhook validation.""" + + max_age_seconds: int + + +class WebhookPushEvent(TypedDict): + """Webhook push event.""" + + type: Literal["push"] + repository: Dict[str, str] + ref: str + before: str + after: str + customer_id: str + pushed_at: datetime + raw_pushed_at: str + + +class ParsedWebhookSignature(TypedDict): + """Parsed webhook signature.""" + + timestamp: str + signature: str + + +class WebhookUnknownEvent(TypedDict): + """Fallback webhook event for unknown types.""" + + type: str + raw: Any + + +WebhookEventPayload = Union[WebhookPushEvent, WebhookUnknownEvent] + + +class WebhookValidationResult(TypedDict, total=False): + """Result from webhook validation.""" + + valid: bool + error: Optional[str] + event_type: Optional[str] + timestamp: Optional[int] + payload: WebhookEventPayload diff --git a/packages/git-storage-sdk-python/pierre_storage/version.py b/packages/git-storage-sdk-python/pierre_storage/version.py new file mode 100644 index 000000000..7dd10cec8 --- /dev/null +++ b/packages/git-storage-sdk-python/pierre_storage/version.py @@ -0,0 +1,13 @@ +"""Version information for Pierre Storage SDK.""" + +PACKAGE_NAME = "code-storage-py-sdk" +PACKAGE_VERSION = "0.12.1" + + +def get_user_agent() -> str: + """Get user agent string for API requests. 
+ + Returns: + User agent string in format: {name}/{version} + """ + return f"{PACKAGE_NAME}/{PACKAGE_VERSION}" diff --git a/packages/git-storage-sdk-python/pierre_storage/webhook.py b/packages/git-storage-sdk-python/pierre_storage/webhook.py new file mode 100644 index 000000000..71c6b64f8 --- /dev/null +++ b/packages/git-storage-sdk-python/pierre_storage/webhook.py @@ -0,0 +1,240 @@ +"""Webhook validation utilities for Pierre Git Storage SDK.""" + +import hashlib +import hmac +import json +import time +from datetime import datetime +from typing import Any, Dict, Optional, Sequence, Union + +from pierre_storage.types import ( + ParsedWebhookSignature, + WebhookEventPayload, + WebhookPushEvent, + WebhookUnknownEvent, + WebhookValidationOptions, + WebhookValidationResult, +) + +__all__ = [ + "WebhookPushEvent", + "parse_signature_header", + "validate_webhook", + "validate_webhook_signature", +] + +PayloadInput = Union[str, bytes, bytearray] +HeaderValue = Union[str, bytes, Sequence[str]] +HeaderMap = Dict[str, HeaderValue] + + +def parse_signature_header(signature: str) -> Optional[ParsedWebhookSignature]: + """Parse the X-Pierre-Signature header. + + Args: + signature: The signature header value (format: "t=timestamp,sha256=signature") + + Returns: + Parsed signature components if valid, otherwise None. 
+ """ + if not isinstance(signature, str): + return None + + timestamp = "" + sig = "" + + for part in signature.split(","): + if "=" not in part: + continue + key, value = part.split("=", 1) + if key == "t": + timestamp = value + elif key == "sha256": + sig = value + + if not timestamp or not sig: + return None + + return {"timestamp": timestamp, "signature": sig} + + +def validate_webhook_signature( + payload: PayloadInput, + signature_header: str, + secret: str, + options: Optional[WebhookValidationOptions] = None, +) -> WebhookValidationResult: + """Validate a webhook signature and timestamp.""" + if not secret: + return {"valid": False, "error": "Empty secret is not allowed"} + + parsed = parse_signature_header(signature_header) + if not parsed: + return {"valid": False, "error": "Invalid signature header format"} + + try: + timestamp = int(parsed["timestamp"]) + except (TypeError, ValueError): + return {"valid": False, "error": "Invalid timestamp in signature"} + + max_age = (options or {}).get("max_age_seconds", 300) + if max_age and max_age > 0: + now = int(time.time()) + age = now - timestamp + if age > max_age: + return { + "valid": False, + "error": f"Webhook timestamp too old ({age} seconds)", + "timestamp": timestamp, + } + if age < -60: + return { + "valid": False, + "error": "Webhook timestamp is in the future", + "timestamp": timestamp, + } + + payload_str = payload.decode("utf-8") if isinstance(payload, (bytes, bytearray)) else payload + if not isinstance(payload_str, str): + payload_str = str(payload_str) + + signed_payload = f"{parsed['timestamp']}.{payload_str}" + expected_signature = hmac.new( + secret.encode("utf-8"), + signed_payload.encode("utf-8"), + hashlib.sha256, + ).hexdigest() + + expected_bytes = expected_signature.encode("utf-8") + actual_bytes = parsed["signature"].encode("utf-8") + + if len(expected_bytes) != len(actual_bytes): + return {"valid": False, "error": "Invalid signature", "timestamp": timestamp} + + if not 
hmac.compare_digest(expected_bytes, actual_bytes): + return {"valid": False, "error": "Invalid signature", "timestamp": timestamp} + + return {"valid": True, "timestamp": timestamp} + + +def validate_webhook( + payload: PayloadInput, + headers: HeaderMap, + secret: str, + options: Optional[WebhookValidationOptions] = None, +) -> WebhookValidationResult: + """Validate a webhook request and return structured payload data.""" + signature_header_raw = headers.get("x-pierre-signature") or headers.get("X-Pierre-Signature") + signature_header = _normalize_header_value(signature_header_raw) + if signature_header is None: + return { + "valid": False, + "error": "Missing or invalid X-Pierre-Signature header", + } + + event_header_raw = headers.get("x-pierre-event") or headers.get("X-Pierre-Event") + event_header = _normalize_header_value(event_header_raw) + if event_header is None: + return { + "valid": False, + "error": "Missing or invalid X-Pierre-Event header", + } + + validation = validate_webhook_signature(payload, signature_header, secret, options) + if not validation.get("valid"): + return validation + + payload_str = payload.decode("utf-8") if isinstance(payload, (bytes, bytearray)) else payload + if not isinstance(payload_str, str): + payload_str = str(payload_str) + + try: + data = json.loads(payload_str) + except json.JSONDecodeError: + return { + "valid": False, + "error": "Invalid JSON payload", + "timestamp": validation.get("timestamp"), + } + + event_type = str(event_header) + conversion = _convert_webhook_payload(event_type, data) + if not conversion["valid"]: + error_msg = conversion.get("error", "Unknown error") + assert isinstance(error_msg, str) + return { + "valid": False, + "error": error_msg, + "timestamp": validation.get("timestamp"), + } + + payload_data = conversion["payload"] + assert isinstance(payload_data, dict) + return { + "valid": True, + "event_type": event_type, + "timestamp": validation.get("timestamp"), + "payload": payload_data, + } + + 
+def parse_push_event(payload: Dict[str, Any]) -> WebhookPushEvent: + """Parse a push event webhook payload. + + Args: + payload: Parsed JSON webhook payload + + Returns: + Parsed push event + + Raises: + ValueError: If payload is not a valid push event + """ + try: + pushed_at_str = payload.get("pushed_at", "") + pushed_at = datetime.fromisoformat(pushed_at_str.replace("Z", "+00:00")) + + return { + "type": "push", + "repository": payload["repository"], + "ref": payload["ref"], + "before": payload["before"], + "after": payload["after"], + "customer_id": payload["customer_id"], + "pushed_at": pushed_at, + "raw_pushed_at": pushed_at_str, + } + except (KeyError, ValueError) as e: + raise ValueError(f"Invalid push event payload: {e}") from e + + +def _normalize_header_value(value: Optional[HeaderValue]) -> Optional[str]: + """Normalize header values to a single string.""" + if value is None: + return None + if isinstance(value, (list, tuple)): + return None + if isinstance(value, bytes): + try: + return value.decode("utf-8") + except UnicodeDecodeError: + return None + if isinstance(value, str) and value.strip(): + return value + return None + + +def _convert_webhook_payload( + event_type: str, + raw: Any, +) -> Dict[str, Union[bool, str, WebhookEventPayload]]: + """Convert raw webhook payload into structured event data.""" + if event_type == "push": + try: + payload = parse_push_event(raw) + except (ValueError, TypeError) as exc: + return {"valid": False, "error": f"Invalid push payload: {exc}"} + return {"valid": True, "payload": payload} + + fallback_payload: WebhookUnknownEvent = {"type": event_type, "raw": raw} + return {"valid": True, "payload": fallback_payload} diff --git a/packages/git-storage-sdk-python/pyproject.toml b/packages/git-storage-sdk-python/pyproject.toml new file mode 100644 index 000000000..4bd4b5a41 --- /dev/null +++ b/packages/git-storage-sdk-python/pyproject.toml @@ -0,0 +1,85 @@ +[build-system] +requires = ["setuptools>=61.0", "wheel"] 
+build-backend = "setuptools.build_meta" + +[project] +name = "pierre-storage" +version = "0.12.1" +description = "Pierre Git Storage SDK for Python" +readme = "README.md" +license = "MIT" +authors = [ + {name = "Pierre", email = "support@pierre.io"} +] +requires-python = ">=3.9" +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Topic :: Software Development :: Libraries :: Python Modules", +] +dependencies = [ + "httpx>=0.27.0", + "pyjwt>=2.8.0", + "cryptography>=41.0.0", + "pydantic>=2.0.0", + "typing-extensions>=4.5.0; python_version < '3.10'", +] + +[project.optional-dependencies] +dev = [ + "pytest>=7.4.0", + "pytest-asyncio>=0.21.0", + "pytest-cov>=4.1.0", + "mypy>=1.5.0", + "ruff>=0.1.0", + "build>=1.0.0", + "twine>=4.0.0", +] + +[project.urls] +Homepage = "https://pierre.io" +Documentation = "https://docs.pierre.io" +Repository = "https://github.com/pierrecomputer/pierre" +Issues = "https://github.com/pierrecomputer/pierre/issues" + +[tool.setuptools.packages.find] +include = ["pierre_storage*"] + +[tool.pytest.ini_options] +testpaths = ["tests"] +asyncio_mode = "auto" +addopts = "--cov=pierre_storage --cov-report=term-missing" + +[tool.mypy] +python_version = "3.9" +strict = true +warn_return_any = true +warn_unused_configs = true +disallow_untyped_defs = true + +[tool.ruff] +line-length = 100 +target-version = "py39" + +[tool.ruff.lint] +select = ["E", "F", "I", "N", "W", "B", "C4", "SIM"] +ignore = ["E501"] + +[tool.uv] +dev-dependencies = [ + "pytest>=7.4.0", + "pytest-asyncio>=0.21.0", + "pytest-cov>=4.1.0", + "mypy>=1.5.0", + "ruff>=0.1.0", + "build>=1.0.0", + "twine>=4.0.0", +] +constraint-dependencies = [ + "urllib3>=2.6.3", +] diff --git a/packages/git-storage-sdk-python/scripts/setup.sh 
b/packages/git-storage-sdk-python/scripts/setup.sh new file mode 100755 index 000000000..32bb11d01 --- /dev/null +++ b/packages/git-storage-sdk-python/scripts/setup.sh @@ -0,0 +1,13 @@ +#!/bin/bash +set -e + +echo "Creating virtual environment..." +python3 -m venv venv + +echo "Upgrading pip..." +./venv/bin/pip install --upgrade pip + +echo "Installing dependencies..." +./venv/bin/pip install -e '.[dev]' + +echo "Setup complete!" diff --git a/packages/git-storage-sdk-python/tests/__init__.py b/packages/git-storage-sdk-python/tests/__init__.py new file mode 100644 index 000000000..41f543d63 --- /dev/null +++ b/packages/git-storage-sdk-python/tests/__init__.py @@ -0,0 +1 @@ +"""Tests for Pierre Git Storage SDK.""" diff --git a/packages/git-storage-sdk-python/tests/conftest.py b/packages/git-storage-sdk-python/tests/conftest.py new file mode 100644 index 000000000..42179f265 --- /dev/null +++ b/packages/git-storage-sdk-python/tests/conftest.py @@ -0,0 +1,28 @@ +"""Pytest configuration and fixtures.""" + +import pytest + +# Test private key (ES256) +TEST_KEY = """-----BEGIN PRIVATE KEY----- +MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgy3DPdzzsP6tOOvmo +rjbx6L7mpFmKKL2hNWNW3urkN8ehRANCAAQ7/DPhGH3kaWl0YEIO+W9WmhyCclDG +yTh6suablSura7ZDG8hpm3oNsq/ykC3Scfsw6ZTuuVuLlXKV/be/Xr0d +-----END PRIVATE KEY-----""" + + +@pytest.fixture +def test_key() -> str: + """Return test private key.""" + return TEST_KEY + + +@pytest.fixture +def git_storage_options(test_key: str) -> dict: + """Return GitStorage options for testing.""" + return { + "name": "test-customer", + "key": test_key, + "api_base_url": "https://api.test.code.storage", + "storage_base_url": "test.code.storage", + "api_version": 1, + } diff --git a/packages/git-storage-sdk-python/tests/test_client.py b/packages/git-storage-sdk-python/tests/test_client.py new file mode 100644 index 000000000..d92c49f5c --- /dev/null +++ b/packages/git-storage-sdk-python/tests/test_client.py @@ -0,0 +1,873 @@ +"""Tests for GitStorage 
client.""" + +from unittest.mock import AsyncMock, MagicMock, patch + +import jwt +import pytest + +from pierre_storage import GitStorage, create_client, generate_jwt +from pierre_storage.version import get_user_agent +from pierre_storage.errors import ApiError + + +class TestGitStorage: + """Tests for GitStorage class.""" + + def test_create_instance(self, git_storage_options: dict) -> None: + """Test creating GitStorage instance.""" + storage = GitStorage(git_storage_options) + assert storage is not None + assert isinstance(storage, GitStorage) + + def test_store_key(self, git_storage_options: dict, test_key: str) -> None: + """Test that key is stored.""" + storage = GitStorage(git_storage_options) + config = storage.get_config() + assert config["key"] == test_key + + def test_missing_options(self) -> None: + """Test error when options are missing.""" + with pytest.raises(ValueError, match="GitStorage requires a name and key"): + GitStorage({}) # type: ignore + + def test_null_key(self, test_key: str) -> None: + """Test error when key is null.""" + with pytest.raises(ValueError, match="GitStorage requires a name and key"): + GitStorage({"name": "test", "key": None}) # type: ignore + + def test_empty_key(self) -> None: + """Test error when key is empty.""" + with pytest.raises(ValueError, match="GitStorage key must be a non-empty string"): + GitStorage({"name": "test", "key": ""}) + + def test_empty_name(self, test_key: str) -> None: + """Test error when name is empty.""" + with pytest.raises(ValueError, match="GitStorage name must be a non-empty string"): + GitStorage({"name": "", "key": test_key}) + + def test_whitespace_key(self) -> None: + """Test error when key is whitespace.""" + with pytest.raises(ValueError, match="GitStorage key must be a non-empty string"): + GitStorage({"name": "test", "key": " "}) + + def test_whitespace_name(self, test_key: str) -> None: + """Test error when name is whitespace.""" + with pytest.raises(ValueError, match="GitStorage 
name must be a non-empty string"): + GitStorage({"name": " ", "key": test_key}) + + def test_non_string_key(self) -> None: + """Test error when key is not a string.""" + with pytest.raises(ValueError, match="GitStorage key must be a non-empty string"): + GitStorage({"name": "test", "key": 123}) # type: ignore + + def test_non_string_name(self, test_key: str) -> None: + """Test error when name is not a string.""" + with pytest.raises(ValueError, match="GitStorage name must be a non-empty string"): + GitStorage({"name": 123, "key": test_key}) # type: ignore + + @pytest.mark.asyncio + async def test_create_repo(self, git_storage_options: dict) -> None: + """Test creating a repository.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.is_success = True + mock_response.json.return_value = {"repo_id": "test-repo", "url": "https://test.git"} + + with patch("httpx.AsyncClient") as mock_client: + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=mock_response + ) + + repo = await storage.create_repo(id="test-repo") + assert repo is not None + assert repo.id == "test-repo" + + @pytest.mark.asyncio + async def test_create_repo_with_base_repo(self, git_storage_options: dict) -> None: + """Test creating a repository with GitHub sync.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.is_success = True + mock_response.json.return_value = {"repo_id": "test-repo", "url": "https://test.git"} + + with patch("httpx.AsyncClient") as mock_client: + mock_post = AsyncMock(return_value=mock_response) + mock_client.return_value.__aenter__.return_value.post = mock_post + + repo = await storage.create_repo( + id="test-repo", + base_repo={ + "owner": "octocat", + "name": "Hello-World", + "default_branch": "main", + }, + ) + assert repo is not None + assert repo.id == "test-repo" + + # Verify the request was 
made with base_repo in the body + call_kwargs = mock_post.call_args[1] + body = call_kwargs["json"] + assert "base_repo" in body + assert body["base_repo"]["provider"] == "github" + assert body["base_repo"]["owner"] == "octocat" + assert body["base_repo"]["name"] == "Hello-World" + assert body["base_repo"]["default_branch"] == "main" + + @pytest.mark.asyncio + async def test_create_repo_with_base_repo_forces_github_provider( + self, git_storage_options: dict + ) -> None: + """Test that base_repo forces provider to 'github'.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.is_success = True + + with patch("httpx.AsyncClient") as mock_client: + mock_post = AsyncMock(return_value=mock_response) + mock_client.return_value.__aenter__.return_value.post = mock_post + + # Create repo without provider in base_repo + await storage.create_repo( + id="test-repo", + base_repo={ + "owner": "octocat", + "name": "Hello-World", + }, + ) + + # Verify provider was forced to 'github' + call_kwargs = mock_post.call_args[1] + body = call_kwargs["json"] + assert body["base_repo"]["provider"] == "github" + + @pytest.mark.asyncio + async def test_create_repo_with_fork_base_repo(self, git_storage_options: dict) -> None: + """Test creating a forked repository.""" + storage = GitStorage(git_storage_options) + + mock_post_response = MagicMock() + mock_post_response.status_code = 200 + mock_post_response.is_success = True + mock_post_response.json.return_value = {"repo_id": "test-repo"} + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(return_value=mock_post_response) + + repo = await storage.create_repo( + id="test-repo", + base_repo={ + "id": "template-repo", + "ref": "develop", + }, + ) + assert repo.default_branch == "main" + + call_kwargs = client_instance.post.call_args[1] + body = call_kwargs["json"] + 
assert "default_branch" not in body + assert body["base_repo"]["provider"] == "code" + assert body["base_repo"]["owner"] == "test-customer" + assert body["base_repo"]["name"] == "template-repo" + assert body["base_repo"]["operation"] == "fork" + assert body["base_repo"]["ref"] == "develop" + + token = body["base_repo"]["auth"]["token"] + payload = jwt.decode(token, options={"verify_signature": False}) + assert payload["repo"] == "template-repo" + assert payload["scopes"] == ["git:read"] + + @pytest.mark.asyncio + async def test_create_repo_conflict(self, git_storage_options: dict) -> None: + """Test creating a repository that already exists.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.status_code = 409 + mock_response.is_success = False + + with patch("httpx.AsyncClient") as mock_client: + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=mock_response + ) + + with pytest.raises(ApiError, match="Repository already exists"): + await storage.create_repo(id="existing-repo") + + @pytest.mark.asyncio + async def test_list_repos(self, git_storage_options: dict) -> None: + """Test listing repositories.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.is_success = True + mock_response.json.return_value = { + "repos": [ + { + "repo_id": "repo-1", + "url": "owner/repo-1", + "default_branch": "main", + "created_at": "2024-01-01T00:00:00Z", + "base_repo": {"provider": "github", "owner": "owner", "name": "repo-1"}, + } + ], + "next_cursor": None, + "has_more": False, + } + + with patch("httpx.AsyncClient") as mock_client: + mock_get = AsyncMock(return_value=mock_response) + mock_client.return_value.__aenter__.return_value.get = mock_get + + result = await storage.list_repos() + assert result["has_more"] is False + assert result["repos"][0]["repo_id"] == "repo-1" + + call_kwargs = mock_get.call_args[1] + headers = 
call_kwargs["headers"] + token = headers["Authorization"].replace("Bearer ", "") + payload = jwt.decode(token, options={"verify_signature": False}) + assert payload["scopes"] == ["org:read"] + assert payload["repo"] == "org" + + @pytest.mark.asyncio + async def test_list_repos_with_cursor(self, git_storage_options: dict) -> None: + """Test listing repositories with pagination.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.is_success = True + mock_response.json.return_value = { + "repos": [], + "next_cursor": "next", + "has_more": True, + } + + with patch("httpx.AsyncClient") as mock_client: + mock_get = AsyncMock(return_value=mock_response) + mock_client.return_value.__aenter__.return_value.get = mock_get + + await storage.list_repos(cursor="cursor-1", limit=10) + + call_args = mock_get.call_args[0] + api_url = call_args[0] + assert "cursor=cursor-1" in api_url + assert "limit=10" in api_url + + @pytest.mark.asyncio + async def test_find_one(self, git_storage_options: dict) -> None: + """Test finding a repository.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.is_success = True + mock_response.json.return_value = {"id": "test-repo"} + + with patch("httpx.AsyncClient") as mock_client: + mock_client.return_value.__aenter__.return_value.get = AsyncMock( + return_value=mock_response + ) + + repo = await storage.find_one(id="test-repo") + assert repo is not None + assert repo.id == "test-repo" + + @pytest.mark.asyncio + async def test_find_one_not_found(self, git_storage_options: dict) -> None: + """Test finding a repository that doesn't exist.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.status_code = 404 + mock_response.is_success = False + + with patch("httpx.AsyncClient") as mock_client: + mock_client.return_value.__aenter__.return_value.get = AsyncMock( 
+ return_value=mock_response + ) + + repo = await storage.find_one(id="nonexistent") + assert repo is None + + @pytest.mark.asyncio + async def test_delete_repo(self, git_storage_options: dict) -> None: + """Test deleting a repository.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.is_success = True + mock_response.json.return_value = { + "repo_id": "test-repo", + "message": "Repository test-repo deletion initiated.", + } + + with patch("httpx.AsyncClient") as mock_client: + mock_client.return_value.__aenter__.return_value.delete = AsyncMock( + return_value=mock_response + ) + + result = await storage.delete_repo(id="test-repo") + assert result["repo_id"] == "test-repo" + assert "deletion initiated" in result["message"] + + @pytest.mark.asyncio + async def test_delete_repo_sends_correct_request(self, git_storage_options: dict) -> None: + """Test that delete_repo sends DELETE request with repo:write scope.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.is_success = True + mock_response.json.return_value = { + "repo_id": "test-repo", + "message": "Repository deletion initiated.", + } + + with patch("httpx.AsyncClient") as mock_client: + mock_delete = AsyncMock(return_value=mock_response) + mock_client.return_value.__aenter__.return_value.delete = mock_delete + + await storage.delete_repo(id="test-repo") + + # Verify the DELETE request was made to the correct URL + mock_delete.assert_called_once() + call_args = mock_delete.call_args[0] + api_url = call_args[0] + assert api_url == "https://api.test.code.storage/api/v1/repos/delete" + + # Verify headers include Authorization with repo:write scope + call_kwargs = mock_delete.call_args[1] + headers = call_kwargs["headers"] + assert "Authorization" in headers + assert headers["Authorization"].startswith("Bearer ") + + # Decode JWT and verify scope + token = 
headers["Authorization"].replace("Bearer ", "") + payload = jwt.decode(token, options={"verify_signature": False}) + assert payload["scopes"] == ["repo:write"] + assert payload["repo"] == "test-repo" + + @pytest.mark.asyncio + async def test_delete_repo_not_found(self, git_storage_options: dict) -> None: + """Test deleting a repository that doesn't exist.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.status_code = 404 + mock_response.is_success = False + + with patch("httpx.AsyncClient") as mock_client: + mock_client.return_value.__aenter__.return_value.delete = AsyncMock( + return_value=mock_response + ) + + with pytest.raises(ApiError, match="Repository not found"): + await storage.delete_repo(id="nonexistent") + + @pytest.mark.asyncio + async def test_delete_repo_already_deleted(self, git_storage_options: dict) -> None: + """Test deleting a repository that was already deleted.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.status_code = 409 + mock_response.is_success = False + + with patch("httpx.AsyncClient") as mock_client: + mock_client.return_value.__aenter__.return_value.delete = AsyncMock( + return_value=mock_response + ) + + with pytest.raises(ApiError, match="Repository already deleted"): + await storage.delete_repo(id="already-deleted") + + @pytest.mark.asyncio + async def test_delete_repo_with_custom_ttl(self, git_storage_options: dict) -> None: + """Test delete_repo honors custom TTL.""" + storage = GitStorage(git_storage_options) + custom_ttl = 300 + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.is_success = True + mock_response.json.return_value = { + "repo_id": "test-repo", + "message": "Repository deletion initiated.", + } + + with patch("httpx.AsyncClient") as mock_client: + mock_delete = AsyncMock(return_value=mock_response) + mock_client.return_value.__aenter__.return_value.delete = mock_delete + + await 
storage.delete_repo(id="test-repo", ttl=custom_ttl) + + # Verify JWT has correct TTL + call_kwargs = mock_delete.call_args[1] + headers = call_kwargs["headers"] + token = headers["Authorization"].replace("Bearer ", "") + payload = jwt.decode(token, options={"verify_signature": False}) + assert payload["exp"] - payload["iat"] == custom_ttl + + def test_create_client_factory(self, git_storage_options: dict) -> None: + """Test create_client factory function.""" + client = create_client(git_storage_options) + assert isinstance(client, GitStorage) + + +class TestJWTGeneration: + """Tests for JWT generation.""" + + @pytest.mark.asyncio + async def test_jwt_structure(self, git_storage_options: dict, test_key: str) -> None: + """Test JWT has correct structure.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.is_success = True + + with patch("httpx.AsyncClient") as mock_client: + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=mock_response + ) + + repo = await storage.create_repo(id="test-repo") + url = await repo.get_remote_url() + + # Extract JWT from URL + import re + + match = re.search(r"https://t:(.+)@test\.code\.storage/test-repo\.git", url) + assert match is not None + token = match.group(1) + + # Decode JWT (without verification for testing) + payload = jwt.decode(token, options={"verify_signature": False}) + + assert payload["iss"] == "test-customer" + assert payload["sub"] == "@pierre/storage" + assert payload["repo"] == "test-repo" + assert "scopes" in payload + assert "iat" in payload + assert "exp" in payload + assert payload["exp"] > payload["iat"] + + @pytest.mark.asyncio + async def test_jwt_default_permissions(self, git_storage_options: dict) -> None: + """Test JWT has default permissions.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.is_success = True + + with 
patch("httpx.AsyncClient") as mock_client: + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=mock_response + ) + + repo = await storage.create_repo(id="test-repo") + url = await repo.get_remote_url() + + # Extract and decode JWT + import re + + match = re.search(r"https://t:(.+)@test\.code\.storage/test-repo\.git", url) + token = match.group(1) + payload = jwt.decode(token, options={"verify_signature": False}) + + assert payload["scopes"] == ["git:write", "git:read"] + + @pytest.mark.asyncio + async def test_jwt_custom_permissions(self, git_storage_options: dict) -> None: + """Test JWT with custom permissions.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.is_success = True + + with patch("httpx.AsyncClient") as mock_client: + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=mock_response + ) + + repo = await storage.create_repo(id="test-repo") + url = await repo.get_remote_url(permissions=["git:read"], ttl=3600) + + # Extract and decode JWT + import re + + match = re.search(r"https://t:(.+)@test\.code\.storage/test-repo\.git", url) + token = match.group(1) + payload = jwt.decode(token, options={"verify_signature": False}) + + assert payload["scopes"] == ["git:read"] + assert payload["exp"] - payload["iat"] == 3600 + + @pytest.mark.asyncio + async def test_get_ephemeral_remote_url(self, git_storage_options: dict) -> None: + """Test getting ephemeral remote URL.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.is_success = True + + with patch("httpx.AsyncClient") as mock_client: + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=mock_response + ) + + repo = await storage.create_repo(id="test-repo") + url = await repo.get_ephemeral_remote_url() + + # Verify URL has +ephemeral.git suffix + assert 
url.endswith("+ephemeral.git") + assert "test-repo+ephemeral.git" in url + + @pytest.mark.asyncio + async def test_get_ephemeral_remote_url_with_permissions( + self, git_storage_options: dict + ) -> None: + """Test ephemeral remote URL with custom permissions.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.is_success = True + + with patch("httpx.AsyncClient") as mock_client: + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=mock_response + ) + + repo = await storage.create_repo(id="test-repo") + url = await repo.get_ephemeral_remote_url(permissions=["git:read"], ttl=3600) + + # Verify URL structure + assert url.endswith("+ephemeral.git") + + # Extract and decode JWT + import re + + match = re.search(r"https://t:(.+)@test\.code\.storage/test-repo\+ephemeral\.git", url) + assert match is not None + token = match.group(1) + payload = jwt.decode(token, options={"verify_signature": False}) + + assert payload["scopes"] == ["git:read"] + assert payload["exp"] - payload["iat"] == 3600 + + @pytest.mark.asyncio + async def test_ephemeral_url_structure(self, git_storage_options: dict) -> None: + """Test that get_ephemeral_remote_url has correct URL structure.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.is_success = True + + with patch("httpx.AsyncClient") as mock_client: + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=mock_response + ) + + repo = await storage.create_repo(id="test-repo") + ephemeral_url = await repo.get_ephemeral_remote_url(permissions=["git:write"], ttl=1800) + + # Verify URL structure + import re + + match = re.search( + r"https://t:(.+)@test\.code\.storage/test-repo\+ephemeral\.git", ephemeral_url + ) + assert match is not None, f"URL doesn't match expected pattern: {ephemeral_url}" + + # Verify JWT has correct scopes and 
TTL + token = match.group(1) + payload = jwt.decode(token, options={"verify_signature": False}) + assert payload["scopes"] == ["git:write"] + assert payload["exp"] - payload["iat"] == 1800 + + +class TestPublicJWTHelper: + """Tests for publicly exported generate_jwt function.""" + + def test_generate_jwt_basic(self, test_key: str) -> None: + """Test basic JWT generation with public helper.""" + token = generate_jwt( + key_pem=test_key, + issuer="test-customer", + repo_id="test-repo", + ) + + # Decode and verify structure + payload = jwt.decode(token, options={"verify_signature": False}) + + assert payload["iss"] == "test-customer" + assert payload["sub"] == "@pierre/storage" + assert payload["repo"] == "test-repo" + assert payload["scopes"] == ["git:write", "git:read"] + assert "iat" in payload + assert "exp" in payload + + def test_generate_jwt_with_custom_scopes(self, test_key: str) -> None: + """Test JWT generation with custom scopes.""" + token = generate_jwt( + key_pem=test_key, + issuer="test-customer", + repo_id="test-repo", + scopes=["git:read"], + ) + + payload = jwt.decode(token, options={"verify_signature": False}) + assert payload["scopes"] == ["git:read"] + + def test_generate_jwt_with_custom_ttl(self, test_key: str) -> None: + """Test JWT generation with custom TTL.""" + ttl = 3600 + token = generate_jwt( + key_pem=test_key, + issuer="test-customer", + repo_id="test-repo", + ttl=ttl, + ) + + payload = jwt.decode(token, options={"verify_signature": False}) + assert payload["exp"] - payload["iat"] == ttl + + def test_generate_jwt_with_all_parameters(self, test_key: str) -> None: + """Test JWT generation with all parameters specified.""" + token = generate_jwt( + key_pem=test_key, + issuer="my-company", + repo_id="my-repo-123", + scopes=["git:write", "git:read", "repo:write"], + ttl=7200, + ) + + payload = jwt.decode(token, options={"verify_signature": False}) + + assert payload["iss"] == "my-company" + assert payload["repo"] == "my-repo-123" + assert 
payload["scopes"] == ["git:write", "git:read", "repo:write"] + assert payload["exp"] - payload["iat"] == 7200 + + def test_generate_jwt_default_ttl(self, test_key: str) -> None: + """Test JWT generation uses 1 year default TTL.""" + token = generate_jwt( + key_pem=test_key, + issuer="test-customer", + repo_id="test-repo", + ) + + payload = jwt.decode(token, options={"verify_signature": False}) + # Default TTL is 1 year (31536000 seconds) + assert payload["exp"] - payload["iat"] == 31536000 + + def test_generate_jwt_invalid_key(self) -> None: + """Test JWT generation with invalid key.""" + with pytest.raises(ValueError, match="Failed to load private key"): + generate_jwt( + key_pem="invalid-key", + issuer="test-customer", + repo_id="test-repo", + ) + + def test_generate_jwt_signature_valid(self, test_key: str) -> None: + """Test that generated JWT signature can be verified.""" + from cryptography.hazmat.primitives import serialization + + # Generate token + token = generate_jwt( + key_pem=test_key, + issuer="test-customer", + repo_id="test-repo", + ) + + # Load public key for verification + private_key = serialization.load_pem_private_key( + test_key.encode("utf-8"), + password=None, + ) + public_key = private_key.public_key() + public_pem = public_key.public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + + # Verify signature + payload = jwt.decode( + token, + public_pem, + algorithms=["ES256"], + ) + + assert payload["iss"] == "test-customer" + assert payload["repo"] == "test-repo" + + +class TestAPIURLConstruction: + """Tests for API URL construction with organization names.""" + + def test_get_default_api_base_url(self) -> None: + """Test that get_default_api_base_url inserts org name correctly.""" + url = GitStorage.get_default_api_base_url("test-org") + assert url == "https://api.test-org.code.storage" + + url2 = GitStorage.get_default_api_base_url("production") + assert url2 == 
"https://api.production.code.storage" + + def test_get_default_storage_base_url(self) -> None: + """Test that get_default_storage_base_url inserts org name correctly.""" + url = GitStorage.get_default_storage_base_url("test-org") + assert url == "test-org.code.storage" + + url2 = GitStorage.get_default_storage_base_url("production") + assert url2 == "production.code.storage" + + @pytest.mark.asyncio + async def test_api_requests_use_correct_url_with_org_name( + self, git_storage_options: dict, test_key: str + ) -> None: + """Test that API requests are made to URLs with org name inserted.""" + # Create storage with name "test-customer" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.json = MagicMock( + return_value={"repo_id": "test-repo", "url": "https://example.com/repo.git"} + ) + mock_response.status_code = 200 + mock_response.is_success = True + + with patch("httpx.AsyncClient") as mock_client: + mock_post = AsyncMock(return_value=mock_response) + mock_client.return_value.__aenter__.return_value.post = mock_post + + await storage.create_repo(id="test-repo") + + # Verify the POST request was made to the correct API URL + # The URL should be https://api.test.code.storage (from fixture) + mock_post.assert_called_once() + call_args = mock_post.call_args[0] + assert len(call_args) > 0 + api_url = call_args[0] + assert api_url == "https://api.test.code.storage/api/v1/repos" + + @pytest.mark.asyncio + async def test_api_requests_with_default_url_uses_org_name(self, test_key: str) -> None: + """Test that API requests use org name when no custom URL is provided.""" + # Create storage without custom api_base_url + storage = GitStorage({"name": "my-org", "key": test_key}) + + mock_response = MagicMock() + mock_response.json = MagicMock( + return_value={"repo_id": "test-repo", "url": "https://example.com/repo.git"} + ) + mock_response.status_code = 200 + mock_response.is_success = True + + with patch("httpx.AsyncClient") as 
mock_client: + mock_post = AsyncMock(return_value=mock_response) + mock_client.return_value.__aenter__.return_value.post = mock_post + + await storage.create_repo(id="test-repo") + + # Verify the POST request was made with org name in URL + mock_post.assert_called_once() + call_args = mock_post.call_args[0] + api_url = call_args[0] + # Should be https://api.my-org.code.storage when using defaults + assert api_url == "https://api.my-org.code.storage/api/v1/repos" + + @pytest.mark.asyncio + async def test_custom_api_url_overrides_default(self, test_key: str) -> None: + """Test that custom API URL takes precedence over default with org name.""" + custom_url = "https://custom-api.example.com" + storage = GitStorage({"name": "my-org", "key": test_key, "api_base_url": custom_url}) + + mock_response = MagicMock() + mock_response.json = MagicMock( + return_value={"repo_id": "test-repo", "url": "https://example.com/repo.git"} + ) + mock_response.status_code = 200 + mock_response.is_success = True + + with patch("httpx.AsyncClient") as mock_client: + mock_post = AsyncMock(return_value=mock_response) + mock_client.return_value.__aenter__.return_value.post = mock_post + + await storage.create_repo(id="test-repo") + + # Verify the POST request uses custom URL, not default with org name + mock_post.assert_called_once() + call_args = mock_post.call_args[0] + api_url = call_args[0] + assert api_url == f"{custom_url}/api/v1/repos" + + +class TestCodeStorageAgentHeader: + """Tests for Code-Storage-Agent header in API requests.""" + + @pytest.mark.asyncio + async def test_create_repo_includes_agent_header(self, git_storage_options: dict) -> None: + """Test that createRepo includes Code-Storage-Agent header.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.json = MagicMock( + return_value={"repo_id": "test-repo", "url": "https://example.com/repo.git"} + ) + mock_response.status_code = 200 + mock_response.is_success = True + + with 
patch("httpx.AsyncClient") as mock_client: + mock_post = AsyncMock(return_value=mock_response) + mock_client.return_value.__aenter__.return_value.post = mock_post + + await storage.create_repo(id="test-repo") + + # Verify headers include Code-Storage-Agent + mock_post.assert_called_once() + call_kwargs = mock_post.call_args[1] + headers = call_kwargs["headers"] + assert "Code-Storage-Agent" in headers + assert headers["Code-Storage-Agent"] == get_user_agent() + + @pytest.mark.asyncio + async def test_find_one_includes_agent_header(self, git_storage_options: dict) -> None: + """Test that findOne includes Code-Storage-Agent header.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.json = MagicMock( + return_value={"repo_id": "test-repo", "url": "https://example.com/repo.git"} + ) + mock_response.status_code = 200 + mock_response.is_success = True + + with patch("httpx.AsyncClient") as mock_client: + mock_get = AsyncMock(return_value=mock_response) + mock_client.return_value.__aenter__.return_value.get = mock_get + + await storage.find_one(id="test-repo") + + # Verify headers include Code-Storage-Agent + mock_get.assert_called_once() + call_kwargs = mock_get.call_args[1] + headers = call_kwargs["headers"] + assert "Code-Storage-Agent" in headers + assert headers["Code-Storage-Agent"] == get_user_agent() diff --git a/packages/git-storage-sdk-python/tests/test_commit.py b/packages/git-storage-sdk-python/tests/test_commit.py new file mode 100644 index 000000000..8b76e60d9 --- /dev/null +++ b/packages/git-storage-sdk-python/tests/test_commit.py @@ -0,0 +1,678 @@ +"""Tests for CommitBuilder.""" + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from pierre_storage import GitStorage +from pierre_storage.version import get_user_agent +from pierre_storage.errors import RefUpdateError + + +class TestCommitBuilder: + """Tests for CommitBuilder operations.""" + + @pytest.mark.asyncio + async def 
test_create_commit_with_string_file(self, git_storage_options: dict) -> None: + """Test creating commit with string file.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + # Mock the streaming response + stream_response = MagicMock() + stream_response.is_success = True + stream_response.aread = AsyncMock( + return_value=b'{"commit":{"commit_sha":"abc123","tree_sha":"def456","target_branch":"main","pack_bytes":1024,"blob_count":1},"result":{"success":true,"status":"ok","branch":"main","old_sha":"000000","new_sha":"abc123"}}' + ) + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(return_value=create_response) + # Mock stream() to return an async context manager + stream_context = MagicMock() + stream_context.__aenter__ = AsyncMock(return_value=stream_response) + stream_context.__aexit__ = AsyncMock(return_value=None) + client_instance.stream = MagicMock(return_value=stream_context) + + repo = await storage.create_repo(id="test-repo") + result = await ( + repo.create_commit( + target_branch="main", + commit_message="Add README", + author={"name": "Test", "email": "test@example.com"}, + ) + .add_file_from_string("README.md", "# Hello World") + .send() + ) + + assert result is not None + assert result["commit_sha"] == "abc123" + assert result["tree_sha"] == "def456" + assert result["target_branch"] == "main" + assert result["ref_update"]["branch"] == "main" + assert result["ref_update"]["new_sha"] == "abc123" + + @pytest.mark.asyncio + async def test_create_commit_with_bytes(self, git_storage_options: dict) -> None: + """Test creating commit with byte content.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + 
create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + # Mock the streaming response + stream_response = MagicMock() + stream_response.is_success = True + stream_response.aread = AsyncMock( + return_value=b'{"commit":{"commit_sha":"xyz789","tree_sha":"uvw456","target_branch":"main","pack_bytes":2048,"blob_count":1},"result":{"success":true,"status":"ok","branch":"main","old_sha":"abc123","new_sha":"xyz789"}}' + ) + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(return_value=create_response) + # Mock stream() to return an async context manager + stream_context = MagicMock() + stream_context.__aenter__ = AsyncMock(return_value=stream_response) + stream_context.__aexit__ = AsyncMock(return_value=None) + client_instance.stream = MagicMock(return_value=stream_context) + + repo = await storage.create_repo(id="test-repo") + result = await ( + repo.create_commit( + target_branch="main", + commit_message="Add binary file", + author={"name": "Test", "email": "test@example.com"}, + ) + .add_file("data.bin", b"\x00\x01\x02\x03") + .send() + ) + + assert result is not None + assert result["commit_sha"] == "xyz789" + + @pytest.mark.asyncio + async def test_create_commit_with_multiple_files(self, git_storage_options: dict) -> None: + """Test creating commit with multiple files.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + # Mock the streaming response + stream_response = MagicMock() + stream_response.is_success = True + stream_response.aread = AsyncMock( + 
return_value=b'{"commit":{"commit_sha":"multi123","tree_sha":"multi456","target_branch":"main","pack_bytes":4096,"blob_count":3},"result":{"success":true,"status":"ok","branch":"main","old_sha":"old123","new_sha":"multi123"}}' + ) + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(return_value=create_response) + # Mock stream() to return an async context manager + stream_context = MagicMock() + stream_context.__aenter__ = AsyncMock(return_value=stream_response) + stream_context.__aexit__ = AsyncMock(return_value=None) + client_instance.stream = MagicMock(return_value=stream_context) + + repo = await storage.create_repo(id="test-repo") + result = await ( + repo.create_commit( + target_branch="main", + commit_message="Multiple files", + author={"name": "Test", "email": "test@example.com"}, + ) + .add_file_from_string("README.md", "# Project") + .add_file_from_string("package.json", '{"name":"test"}') + .add_file("data.bin", b"\x00\x01") + .send() + ) + + assert result is not None + assert result["blob_count"] == 3 + + @pytest.mark.asyncio + async def test_create_commit_with_delete(self, git_storage_options: dict) -> None: + """Test creating commit with file deletion.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + # Mock the streaming response + stream_response = MagicMock() + stream_response.is_success = True + stream_response.aread = AsyncMock( + return_value=b'{"commit":{"commit_sha":"del123","tree_sha":"del456","target_branch":"main","pack_bytes":512,"blob_count":0},"result":{"success":true,"status":"ok","branch":"main","old_sha":"old123","new_sha":"del123"}}' + ) + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + 
client_instance.post = AsyncMock(return_value=create_response) + # Mock stream() to return an async context manager + stream_context = MagicMock() + stream_context.__aenter__ = AsyncMock(return_value=stream_response) + stream_context.__aexit__ = AsyncMock(return_value=None) + client_instance.stream = MagicMock(return_value=stream_context) + + repo = await storage.create_repo(id="test-repo") + result = await ( + repo.create_commit( + target_branch="main", + commit_message="Delete old file", + author={"name": "Test", "email": "test@example.com"}, + ) + .delete_path("old-file.txt") + .send() + ) + + assert result is not None + assert result["commit_sha"] == "del123" + + @pytest.mark.asyncio + async def test_create_commit_with_expected_head(self, git_storage_options: dict) -> None: + """Test creating commit with expected head SHA.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + # Mock the streaming response + stream_response = MagicMock() + stream_response.is_success = True + stream_response.aread = AsyncMock( + return_value=b'{"commit":{"commit_sha":"new123","tree_sha":"new456","target_branch":"main","pack_bytes":1024,"blob_count":1},"result":{"success":true,"status":"ok","branch":"main","old_sha":"expected123","new_sha":"new123"}}' + ) + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(return_value=create_response) + # Mock stream() to return an async context manager + stream_context = MagicMock() + stream_context.__aenter__ = AsyncMock(return_value=stream_response) + stream_context.__aexit__ = AsyncMock(return_value=None) + client_instance.stream = MagicMock(return_value=stream_context) + + repo = await storage.create_repo(id="test-repo") + result = await ( + repo.create_commit( + 
target_branch="main", + expected_head_sha="expected123", + commit_message="Safe update", + author={"name": "Test", "email": "test@example.com"}, + ) + .add_file_from_string("file.txt", "content") + .send() + ) + + assert result is not None + assert result["ref_update"]["old_sha"] == "expected123" + + @pytest.mark.asyncio + async def test_create_commit_ref_update_failed(self, git_storage_options: dict) -> None: + """Test handling ref update failure.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + # Mock the streaming response + stream_response = MagicMock() + stream_response.is_success = True + stream_response.aread = AsyncMock( + return_value=b'{"commit":{"commit_sha":"fail123","tree_sha":"fail456","target_branch":"main","pack_bytes":1024,"blob_count":1},"result":{"success":false,"status":"rejected","reason":"conflict","branch":"main","old_sha":"old123","new_sha":"fail123"}}' + ) + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(return_value=create_response) + # Mock stream() to return an async context manager + stream_context = MagicMock() + stream_context.__aenter__ = AsyncMock(return_value=stream_response) + stream_context.__aexit__ = AsyncMock(return_value=None) + client_instance.stream = MagicMock(return_value=stream_context) + + repo = await storage.create_repo(id="test-repo") + + with pytest.raises(RefUpdateError) as exc_info: + await ( + repo.create_commit( + target_branch="main", + commit_message="Should fail", + author={"name": "Test", "email": "test@example.com"}, + ) + .add_file_from_string("file.txt", "content") + .send() + ) + + assert exc_info.value.status == "rejected" + assert ( + exc_info.value.reason == "rejected" + ) # reason defaults to status when not provided + + 
@pytest.mark.asyncio + async def test_create_commit_with_custom_encoding(self, git_storage_options: dict) -> None: + """Test creating commit with custom text encoding.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + # Mock the streaming response + stream_response = MagicMock() + stream_response.is_success = True + stream_response.aread = AsyncMock( + return_value=b'{"commit":{"commit_sha":"enc123","tree_sha":"enc456","target_branch":"main","pack_bytes":1024,"blob_count":1},"result":{"success":true,"status":"ok","branch":"main","old_sha":"000000","new_sha":"enc123"}}' + ) + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(return_value=create_response) + # Mock stream() to return an async context manager + stream_context = MagicMock() + stream_context.__aenter__ = AsyncMock(return_value=stream_response) + stream_context.__aexit__ = AsyncMock(return_value=None) + client_instance.stream = MagicMock(return_value=stream_context) + + repo = await storage.create_repo(id="test-repo") + result = await ( + repo.create_commit( + target_branch="main", + commit_message="Latin-1 file", + author={"name": "Test", "email": "test@example.com"}, + ) + .add_file_from_string("file.txt", "café", encoding="latin-1") + .send() + ) + + assert result is not None + assert result["commit_sha"] == "enc123" + + @pytest.mark.asyncio + async def test_create_commit_with_committer(self, git_storage_options: dict) -> None: + """Test creating commit with separate committer.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + # Mock the streaming response + 
stream_response = MagicMock() + stream_response.is_success = True + stream_response.aread = AsyncMock( + return_value=b'{"commit":{"commit_sha":"com123","tree_sha":"com456","target_branch":"main","pack_bytes":1024,"blob_count":1},"result":{"success":true,"status":"ok","branch":"main","old_sha":"000000","new_sha":"com123"}}' + ) + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(return_value=create_response) + # Mock stream() to return an async context manager + stream_context = MagicMock() + stream_context.__aenter__ = AsyncMock(return_value=stream_response) + stream_context.__aexit__ = AsyncMock(return_value=None) + client_instance.stream = MagicMock(return_value=stream_context) + + repo = await storage.create_repo(id="test-repo") + result = await ( + repo.create_commit( + target_branch="main", + commit_message="Authored by one, committed by another", + author={"name": "Author", "email": "author@example.com"}, + committer={"name": "Committer", "email": "committer@example.com"}, + ) + .add_file_from_string("file.txt", "content") + .send() + ) + + assert result is not None + assert result["commit_sha"] == "com123" + + @pytest.mark.asyncio + async def test_create_commit_with_base_branch(self, git_storage_options: dict) -> None: + """Test creating commit with base_branch metadata.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + # Mock the streaming response + stream_response = MagicMock() + stream_response.is_success = True + stream_response.aread = AsyncMock( + 
return_value=b'{"commit":{"commit_sha":"deadbeef","tree_sha":"cafebabe","target_branch":"feature/one","pack_bytes":1,"blob_count":1},"result":{"success":true,"status":"ok","branch":"feature/one","old_sha":"0000000000000000000000000000000000000000","new_sha":"deadbeef"}}' + ) + + # Capture the request to verify base_branch is included + captured_body = None + + def capture_stream(*args, **kwargs): + nonlocal captured_body + content = kwargs.get("content") + + async def capture_content(): + nonlocal captured_body + if content: + chunks = [] + async for chunk in content: + chunks.append(chunk) + captured_body = b"".join(chunks).decode("utf-8") + + # Create stream context that will capture content + stream_context = MagicMock() + + async def aenter_handler(*args, **kwargs): + await capture_content() + return stream_response + + stream_context.__aenter__ = AsyncMock(side_effect=aenter_handler) + stream_context.__aexit__ = AsyncMock(return_value=None) + return stream_context + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(return_value=create_response) + client_instance.stream = capture_stream + + repo = await storage.create_repo(id="test-repo") + result = await ( + repo.create_commit( + target_branch="feature/one", + base_branch="main", + expected_head_sha="abc123", + commit_message="branch off main", + author={"name": "Author", "email": "author@example.com"}, + ) + .add_file_from_string("docs/base.txt", "hello") + .send() + ) + + assert result is not None + assert result["commit_sha"] == "deadbeef" + + # Verify metadata includes base_branch + assert captured_body is not None + import json + + metadata_line = captured_body.split("\n")[0] + metadata = json.loads(metadata_line)["metadata"] + assert metadata["base_branch"] == "main" + assert metadata["expected_head_sha"] == "abc123" + assert metadata["target_branch"] == "feature/one" + + @pytest.mark.asyncio + async 
def test_create_commit_base_branch_without_expected_head( + self, git_storage_options: dict + ) -> None: + """Test creating commit with base_branch but without expected_head_sha.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + # Mock the streaming response + stream_response = MagicMock() + stream_response.is_success = True + stream_response.aread = AsyncMock( + return_value=b'{"commit":{"commit_sha":"abc123","tree_sha":"def456","target_branch":"feature/one","pack_bytes":1,"blob_count":1},"result":{"success":true,"status":"ok","branch":"feature/one","old_sha":"0000000000000000000000000000000000000000","new_sha":"abc123"}}' + ) + + # Capture the request to verify base_branch is included + captured_body = None + + def capture_stream(*args, **kwargs): + nonlocal captured_body + content = kwargs.get("content") + + async def capture_content(): + nonlocal captured_body + if content: + chunks = [] + async for chunk in content: + chunks.append(chunk) + captured_body = b"".join(chunks).decode("utf-8") + + # Create stream context that will capture content + stream_context = MagicMock() + + async def aenter_handler(*args, **kwargs): + await capture_content() + return stream_response + + stream_context.__aenter__ = AsyncMock(side_effect=aenter_handler) + stream_context.__aexit__ = AsyncMock(return_value=None) + return stream_context + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(return_value=create_response) + client_instance.stream = capture_stream + + repo = await storage.create_repo(id="test-repo") + result = await ( + repo.create_commit( + target_branch="feature/one", + base_branch="main", + commit_message="branch off", + author={"name": "Author", "email": "author@example.com"}, + ) + 
.add_file_from_string("docs/base.txt", "hello") + .send() + ) + + assert result is not None + assert result["commit_sha"] == "abc123" + + # Verify metadata includes base_branch but not expected_head_sha + assert captured_body is not None + import json + + metadata_line = captured_body.split("\n")[0] + metadata = json.loads(metadata_line)["metadata"] + assert metadata["base_branch"] == "main" + assert "expected_head_sha" not in metadata + + @pytest.mark.asyncio + async def test_create_commit_ephemeral_flags_included_in_metadata( + self, git_storage_options: dict + ) -> None: + """Ensure ephemeral options are forwarded in metadata.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + stream_response = MagicMock() + stream_response.is_success = True + stream_response.aread = AsyncMock( + return_value=b'{"commit":{"commit_sha":"eph123","tree_sha":"eph456","target_branch":"feature/demo","pack_bytes":1,"blob_count":1},"result":{"success":true,"status":"ok","branch":"feature/demo","old_sha":"0000000000000000000000000000000000000000","new_sha":"eph123"}}' + ) + + captured_body = None + + def capture_stream(*args, **kwargs): + nonlocal captured_body + content = kwargs.get("content") + + async def capture_content(): + nonlocal captured_body + if content: + chunks = [] + async for chunk in content: + chunks.append(chunk) + captured_body = b"".join(chunks).decode("utf-8") + + stream_context = MagicMock() + + async def aenter_handler(*args, **kwargs): + await capture_content() + return stream_response + + stream_context.__aenter__ = AsyncMock(side_effect=aenter_handler) + stream_context.__aexit__ = AsyncMock(return_value=None) + return stream_context + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = 
AsyncMock(return_value=create_response) + client_instance.stream = capture_stream + + repo = await storage.create_repo(id="test-repo") + await ( + repo.create_commit( + target_branch="feature/demo", + base_branch="feature/base", + ephemeral=True, + ephemeral_base=True, + commit_message="ephemeral commit", + author={"name": "Author", "email": "author@example.com"}, + ) + .add_file_from_string("docs/file.txt", "hello") + .send() + ) + + assert captured_body is not None + import json + + metadata_line = captured_body.split("\n")[0] + metadata = json.loads(metadata_line)["metadata"] + assert metadata["ephemeral"] is True + assert metadata["ephemeral_base"] is True + assert metadata["base_branch"] == "feature/base" + + @pytest.mark.asyncio + async def test_create_commit_ephemeral_base_requires_base_branch( + self, git_storage_options: dict + ) -> None: + """ephemeral_base should require base_branch.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(return_value=create_response) + + repo = await storage.create_repo(id="test-repo") + + with pytest.raises(ValueError) as exc_info: + repo.create_commit( + target_branch="feature/demo", + commit_message="missing base branch", + ephemeral_base=True, + author={"name": "Author", "email": "author@example.com"}, + ) + + assert "ephemeral_base requires base_branch" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_create_commit_base_branch_rejects_refs_prefix( + self, git_storage_options: dict + ) -> None: + """Test that base_branch with refs/ prefix is rejected.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + 
create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(return_value=create_response) + + repo = await storage.create_repo(id="test-repo") + + with pytest.raises(ValueError) as exc_info: + repo.create_commit( + target_branch="feature/two", + base_branch="refs/heads/main", + expected_head_sha="abc123", + commit_message="branch", + author={"name": "Author", "email": "author@example.com"}, + ) + + assert "must not include refs/ prefix" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_create_commit_includes_agent_header(self, git_storage_options: dict) -> None: + """Test that createCommit includes Code-Storage-Agent header.""" + from unittest.mock import AsyncMock, MagicMock, patch + + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.json = AsyncMock( + return_value={"repo_id": "test-repo", "url": "https://example.com/repo.git"} + ) + mock_response.status_code = 200 + mock_response.is_success = True + + # Mock streaming response for commit + stream_response = MagicMock() + stream_response.is_success = True + stream_response.status_code = 200 + stream_response.aread = AsyncMock( + return_value=b'{"commit":{"commit_sha":"abc123","tree_sha":"def456","target_branch":"main","pack_bytes":1024,"blob_count":1},"result":{"success":true,"status":"ok","branch":"main","old_sha":"000000","new_sha":"abc123"}}' + ) + + captured_headers = None + + with patch("httpx.AsyncClient") as mock_client: + # Setup create repo mock + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=mock_response + ) + + # Setup stream mock + stream_context = MagicMock() + stream_context.__aenter__ = AsyncMock(return_value=stream_response) + stream_context.__aexit__ = AsyncMock(return_value=None) + + def capture_stream(*args, 
**kwargs): + nonlocal captured_headers + captured_headers = kwargs.get("headers") + return stream_context + + mock_client.return_value.__aenter__.return_value.stream = capture_stream + + repo = await storage.create_repo(id="test-repo") + await ( + repo.create_commit( + target_branch="main", + commit_message="Test", + author={"name": "Author", "email": "author@example.com"}, + ) + .add_file_from_string("test.txt", "test") + .send() + ) + + # Verify headers include Code-Storage-Agent + assert captured_headers is not None + assert "Code-Storage-Agent" in captured_headers + assert captured_headers["Code-Storage-Agent"] == get_user_agent() diff --git a/packages/git-storage-sdk-python/tests/test_repo.py b/packages/git-storage-sdk-python/tests/test_repo.py new file mode 100644 index 000000000..abe4591c4 --- /dev/null +++ b/packages/git-storage-sdk-python/tests/test_repo.py @@ -0,0 +1,1415 @@ +"""Tests for Repo operations.""" + +from unittest.mock import AsyncMock, MagicMock, patch +from urllib.parse import parse_qs, urlparse + +import pytest + +from pierre_storage import GitStorage +from pierre_storage.version import get_user_agent +from pierre_storage.errors import ApiError, RefUpdateError + + +class TestRepoFileOperations: + """Tests for file operations.""" + + @pytest.mark.asyncio + async def test_get_file_stream(self, git_storage_options: dict) -> None: + """Test getting file stream.""" + storage = GitStorage(git_storage_options) + + # Mock repo creation + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + # Mock file stream response + file_response = MagicMock() + file_response.status_code = 200 + file_response.is_success = True + file_response.raise_for_status = MagicMock() + file_response.aclose = AsyncMock() + + with patch("httpx.AsyncClient") as mock_client_cls: + create_client = MagicMock() + create_client.__aenter__.return_value.post = 
AsyncMock(return_value=create_response) + create_client.__aexit__.return_value = False + + stream_client = MagicMock() + stream_context = MagicMock() + stream_context.__aenter__ = AsyncMock(return_value=file_response) + stream_context.__aexit__ = AsyncMock(return_value=False) + stream_client.stream = MagicMock(return_value=stream_context) + stream_client.aclose = AsyncMock() + + mock_client_cls.side_effect = [create_client, stream_client] + + repo = await storage.create_repo(id="test-repo") + response = await repo.get_file_stream(path="README.md", ref="main") + + assert response is not None + assert response.status_code == 200 + await response.aclose() + stream_client.stream.assert_called_once() + file_response.aclose.assert_awaited_once() + stream_client.aclose.assert_awaited_once() + + @pytest.mark.asyncio + async def test_get_file_stream_actual_streaming(self, git_storage_options: dict) -> None: + """Test that file streaming actually works with aiter_bytes.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + # Mock a streaming response with actual content + file_response = MagicMock() + file_response.status_code = 200 + file_response.is_success = True + file_response.raise_for_status = MagicMock() + file_response.aclose = AsyncMock() + + # Mock the async iteration over bytes + async def mock_aiter_bytes(): + yield b"Hello, " + yield b"world!" 
+ + file_response.aiter_bytes = mock_aiter_bytes + + with patch("httpx.AsyncClient") as mock_client_cls: + create_client = MagicMock() + create_client.__aenter__.return_value.post = AsyncMock(return_value=create_response) + create_client.__aexit__.return_value = False + + stream_client = MagicMock() + stream_context = MagicMock() + stream_context.__aenter__ = AsyncMock(return_value=file_response) + stream_context.__aexit__ = AsyncMock(return_value=False) + stream_client.stream = MagicMock(return_value=stream_context) + stream_client.aclose = AsyncMock() + + mock_client_cls.side_effect = [create_client, stream_client] + + repo = await storage.create_repo(id="test-repo") + response = await repo.get_file_stream(path="README.md", ref="main") + + # Actually consume the stream + chunks = [] + async for chunk in response.aiter_bytes(): + chunks.append(chunk) + + content = b"".join(chunks) + assert content == b"Hello, world!" + assert response.status_code == 200 + + await response.aclose() + stream_client.stream.assert_called_once() + + @pytest.mark.asyncio + async def test_get_file_stream_ephemeral_flag(self, git_storage_options: dict) -> None: + """Ensure ephemeral flag propagates to file requests.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + file_response = MagicMock() + file_response.status_code = 200 + file_response.is_success = True + file_response.raise_for_status = MagicMock() + file_response.aclose = AsyncMock() + + with patch("httpx.AsyncClient") as mock_client_cls: + create_client = MagicMock() + create_client.__aenter__.return_value.post = AsyncMock(return_value=create_response) + create_client.__aexit__.return_value = False + + stream_client = MagicMock() + stream_context = MagicMock() + stream_context.__aenter__ = AsyncMock(return_value=file_response) + stream_context.__aexit__ = 
AsyncMock(return_value=False) + stream_client.stream = MagicMock(return_value=stream_context) + stream_client.aclose = AsyncMock() + + mock_client_cls.side_effect = [create_client, stream_client] + + repo = await storage.create_repo(id="test-repo") + response = await repo.get_file_stream( + path="README.md", + ref="feature/demo", + ephemeral=True, + ) + + assert response.status_code == 200 + called_url = stream_client.stream.call_args.args[1] + parsed = urlparse(called_url) + params = parse_qs(parsed.query) + assert params.get("ephemeral") == ["true"] + assert params.get("ref") == ["feature/demo"] + + await response.aclose() + stream_client.stream.assert_called_once() + file_response.aclose.assert_awaited_once() + stream_client.aclose.assert_awaited_once() + + @pytest.mark.asyncio + async def test_list_files(self, git_storage_options: dict) -> None: + """Test listing files in repository.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + list_response = MagicMock() + list_response.status_code = 200 + list_response.is_success = True + list_response.json.return_value = { + "paths": ["README.md", "src/main.py", "package.json"], + "ref": "main", + } + + with patch("httpx.AsyncClient") as mock_client: + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=create_response + ) + mock_client.return_value.__aenter__.return_value.get = AsyncMock( + return_value=list_response + ) + + repo = await storage.create_repo(id="test-repo") + result = await repo.list_files(ref="main") + + assert result is not None + assert "paths" in result + assert len(result["paths"]) == 3 + assert "README.md" in result["paths"] + + @pytest.mark.asyncio + async def test_list_files_ephemeral_flag(self, git_storage_options: dict) -> None: + """Ensure ephemeral flag propagates to list files.""" + storage = 
GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + list_response = MagicMock() + list_response.status_code = 200 + list_response.is_success = True + list_response.json.return_value = { + "paths": ["README.md"], + "ref": "refs/namespaces/ephemeral/refs/heads/feature/demo", + } + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(return_value=create_response) + client_instance.get = AsyncMock(return_value=list_response) + + repo = await storage.create_repo(id="test-repo") + result = await repo.list_files(ref="feature/demo", ephemeral=True) + + assert result["paths"] == ["README.md"] + assert result["ref"] == "refs/namespaces/ephemeral/refs/heads/feature/demo" + called_url = client_instance.get.call_args.args[0] + parsed = urlparse(called_url) + params = parse_qs(parsed.query) + assert params.get("ephemeral") == ["true"] + assert params.get("ref") == ["feature/demo"] + + @pytest.mark.asyncio + async def test_grep_posts_body_and_parses_response(self, git_storage_options: dict) -> None: + """Test grep request body and response parsing.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + grep_response = MagicMock() + grep_response.status_code = 200 + grep_response.is_success = True + grep_response.raise_for_status = MagicMock() + grep_response.json.return_value = { + "query": {"pattern": "SEARCHME", "case_sensitive": False}, + "repo": {"ref": "main", "commit": "deadbeef"}, + "matches": [ + { + "path": "src/a.ts", + "lines": [{"line_number": 12, "text": "SEARCHME", "type": "match"}], + } + ], + "next_cursor": None, + "has_more": False, + } + + with 
patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(side_effect=[create_response, grep_response]) + + repo = await storage.create_repo(id="test-repo") + result = await repo.grep( + pattern="SEARCHME", + ref="main", + paths=["src/"], + case_sensitive=False, + file_filters={"include_globs": ["**/*.ts"], "exclude_globs": ["**/vendor/**"]}, + context={"before": 1, "after": 2}, + limits={"max_lines": 5, "max_matches_per_file": 7}, + pagination={"cursor": "abc", "limit": 3}, + ) + + assert result["query"]["pattern"] == "SEARCHME" + assert result["query"]["case_sensitive"] is False + assert result["repo"]["ref"] == "main" + assert result["repo"]["commit"] == "deadbeef" + assert result["matches"][0]["path"] == "src/a.ts" + assert result["matches"][0]["lines"][0]["line_number"] == 12 + assert result["matches"][0]["lines"][0]["text"] == "SEARCHME" + assert result["next_cursor"] is None + assert result["has_more"] is False + + _, kwargs = client_instance.post.call_args + assert kwargs["json"]["rev"] == "main" + assert kwargs["json"]["paths"] == ["src/"] + assert kwargs["json"]["query"] == {"pattern": "SEARCHME", "case_sensitive": False} + assert kwargs["json"]["file_filters"] == { + "include_globs": ["**/*.ts"], + "exclude_globs": ["**/vendor/**"], + } + assert kwargs["json"]["context"] == {"before": 1, "after": 2} + assert kwargs["json"]["limits"] == {"max_lines": 5, "max_matches_per_file": 7} + assert kwargs["json"]["pagination"] == {"cursor": "abc", "limit": 3} + + +class TestRepoBranchOperations: + """Tests for branch operations.""" + + @pytest.mark.asyncio + async def test_list_branches(self, git_storage_options: dict) -> None: + """Test listing branches.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + 
branches_response = MagicMock() + branches_response.status_code = 200 + branches_response.is_success = True + branches_response.json.return_value = { + "branches": [ + { + "cursor": "c1", + "name": "main", + "head_sha": "abc123", + "created_at": "2025-01-01T00:00:00Z", + }, + { + "cursor": "c2", + "name": "develop", + "head_sha": "def456", + "created_at": "2025-01-02T00:00:00Z", + }, + ], + "next_cursor": None, + "has_more": False, + } + + with patch("httpx.AsyncClient") as mock_client: + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=create_response + ) + mock_client.return_value.__aenter__.return_value.get = AsyncMock( + return_value=branches_response + ) + + repo = await storage.create_repo(id="test-repo") + result = await repo.list_branches(limit=10) + + assert result is not None + assert "branches" in result + assert len(result["branches"]) == 2 + assert result["branches"][0]["name"] == "main" + + @pytest.mark.asyncio + async def test_list_branches_with_pagination(self, git_storage_options: dict) -> None: + """Test listing branches with pagination cursor.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + branches_response = MagicMock() + branches_response.status_code = 200 + branches_response.is_success = True + branches_response.json.return_value = { + "branches": [ + { + "cursor": "c3", + "name": "feature-1", + "head_sha": "ghi789", + "created_at": "2025-01-03T00:00:00Z", + } + ], + "next_cursor": "next-page-token", + "has_more": True, + } + + with patch("httpx.AsyncClient") as mock_client: + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=create_response + ) + mock_client.return_value.__aenter__.return_value.get = AsyncMock( + return_value=branches_response + ) + + repo = await storage.create_repo(id="test-repo") + result 
= await repo.list_branches(limit=1, cursor="some-cursor") + + assert result is not None + assert result["next_cursor"] == "next-page-token" + assert result["has_more"] is True + + @pytest.mark.asyncio + async def test_create_branch(self, git_storage_options: dict) -> None: + """Test creating a branch using the REST API.""" + storage = GitStorage(git_storage_options) + + create_repo_response = MagicMock() + create_repo_response.status_code = 200 + create_repo_response.is_success = True + create_repo_response.json.return_value = {"repo_id": "test-repo"} + + create_branch_response = MagicMock() + create_branch_response.status_code = 200 + create_branch_response.is_success = True + create_branch_response.json.return_value = { + "message": "branch created", + "target_branch": "feature/demo", + "target_is_ephemeral": True, + "commit_sha": "abc123", + } + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock( + side_effect=[create_repo_response, create_branch_response] + ) + + repo = await storage.create_repo(id="test-repo") + result = await repo.create_branch( + base_branch="main", + target_branch="feature/demo", + target_is_ephemeral=True, + ) + + assert result["message"] == "branch created" + assert result["target_branch"] == "feature/demo" + assert result["target_is_ephemeral"] is True + assert result["commit_sha"] == "abc123" + + # Ensure the API call was issued with the expected payload + assert client_instance.post.await_count == 2 + branch_call = client_instance.post.await_args_list[1] + assert branch_call.args[0].endswith("/api/v1/repos/branches/create") + payload = branch_call.kwargs["json"] + assert payload["base_branch"] == "main" + assert payload["target_branch"] == "feature/demo" + assert payload["target_is_ephemeral"] is True + + @pytest.mark.asyncio + async def test_promote_ephemeral_branch_defaults(self, git_storage_options: dict) -> None: + """Test 
promoting an ephemeral branch with default target branch.""" + storage = GitStorage(git_storage_options) + + create_repo_response = MagicMock() + create_repo_response.status_code = 200 + create_repo_response.is_success = True + create_repo_response.json.return_value = {"repo_id": "test-repo"} + + promote_response = MagicMock() + promote_response.status_code = 200 + promote_response.is_success = True + promote_response.json.return_value = { + "message": "branch promoted", + "target_branch": "ephemeral/demo", + "target_is_ephemeral": False, + "commit_sha": "def456", + } + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(side_effect=[create_repo_response, promote_response]) + + repo = await storage.create_repo(id="test-repo") + result = await repo.promote_ephemeral_branch(base_branch="ephemeral/demo") + + assert result["message"] == "branch promoted" + assert result["target_branch"] == "ephemeral/demo" + assert result["target_is_ephemeral"] is False + assert result["commit_sha"] == "def456" + + assert client_instance.post.await_count == 2 + branch_call = client_instance.post.await_args_list[1] + assert branch_call.args[0].endswith("/api/v1/repos/branches/create") + payload = branch_call.kwargs["json"] + assert payload["base_branch"] == "ephemeral/demo" + assert payload["target_branch"] == "ephemeral/demo" + assert payload["base_is_ephemeral"] is True + assert payload["target_is_ephemeral"] is False + + @pytest.mark.asyncio + async def test_promote_ephemeral_branch_custom_target( + self, + git_storage_options: dict, + ) -> None: + """Test promoting an ephemeral branch to a custom target branch.""" + storage = GitStorage(git_storage_options) + + create_repo_response = MagicMock() + create_repo_response.status_code = 200 + create_repo_response.is_success = True + create_repo_response.json.return_value = {"repo_id": "test-repo"} + + promote_response = MagicMock() + 
promote_response.status_code = 200 + promote_response.is_success = True + promote_response.json.return_value = { + "message": "branch promoted", + "target_branch": "feature/final-demo", + "target_is_ephemeral": False, + } + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(side_effect=[create_repo_response, promote_response]) + + repo = await storage.create_repo(id="test-repo") + result = await repo.promote_ephemeral_branch( + base_branch="ephemeral/demo", + target_branch="feature/final-demo", + ) + + assert result["target_branch"] == "feature/final-demo" + assert result["target_is_ephemeral"] is False + + assert client_instance.post.await_count == 2 + branch_call = client_instance.post.await_args_list[1] + payload = branch_call.kwargs["json"] + assert payload["base_branch"] == "ephemeral/demo" + assert payload["target_branch"] == "feature/final-demo" + assert payload["base_is_ephemeral"] is True + assert payload["target_is_ephemeral"] is False + + @pytest.mark.asyncio + async def test_create_branch_conflict(self, git_storage_options: dict) -> None: + """Test create_branch surfaces API errors.""" + storage = GitStorage(git_storage_options) + + create_repo_response = MagicMock() + create_repo_response.status_code = 200 + create_repo_response.is_success = True + create_repo_response.json.return_value = {"repo_id": "test-repo"} + + conflict_response = MagicMock() + conflict_response.status_code = 409 + conflict_response.is_success = False + conflict_response.json.return_value = {"message": "branch already exists"} + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(side_effect=[create_repo_response, conflict_response]) + + repo = await storage.create_repo(id="test-repo") + + with pytest.raises(ApiError) as exc_info: + await repo.create_branch( + 
base_branch="main", + target_branch="feature/demo", + ) + + assert exc_info.value.status_code == 409 + assert "branch already exists" in str(exc_info.value) + + +class TestRepoCommitOperations: + """Tests for commit operations.""" + + @pytest.mark.asyncio + async def test_list_commits(self, git_storage_options: dict) -> None: + """Test listing commits.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + commits_response = MagicMock() + commits_response.status_code = 200 + commits_response.is_success = True + commits_response.json.return_value = { + "commits": [ + { + "sha": "abc123", + "message": "Initial commit", + "author_name": "Test User", + "author_email": "test@example.com", + "committer_name": "Test User", + "committer_email": "test@example.com", + "date": "2025-01-01T00:00:00Z", + }, + { + "sha": "def456", + "message": "Second commit", + "author_name": "Test User", + "author_email": "test@example.com", + "committer_name": "Test User", + "committer_email": "test@example.com", + "date": "2025-01-02T00:00:00Z", + }, + ], + "next_cursor": None, + "has_more": False, + } + + with patch("httpx.AsyncClient") as mock_client: + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=create_response + ) + mock_client.return_value.__aenter__.return_value.get = AsyncMock( + return_value=commits_response + ) + + repo = await storage.create_repo(id="test-repo") + result = await repo.list_commits(branch="main", limit=10) + + assert result is not None + assert "commits" in result + assert len(result["commits"]) == 2 + assert result["commits"][0]["sha"] == "abc123" + assert result["commits"][0]["message"] == "Initial commit" + + @pytest.mark.asyncio + async def test_restore_commit(self, git_storage_options: dict) -> None: + """Test restoring to a previous commit.""" + storage = 
GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + restore_response = MagicMock() + restore_response.status_code = 200 + restore_response.is_success = True + restore_response.json.return_value = { + "commit": { + "commit_sha": "new-commit-sha", + "tree_sha": "new-tree-sha", + "target_branch": "main", + "pack_bytes": 1024, + "blob_count": 0, + }, + "result": { + "success": True, + "branch": "main", + "old_sha": "old-sha", + "new_sha": "new-commit-sha", + "status": "ok", + }, + } + + with patch("httpx.AsyncClient") as mock_client: + # Mock both create and restore + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + side_effect=[create_response, restore_response] + ) + + repo = await storage.create_repo(id="test-repo") + result = await repo.restore_commit( + target_branch="main", + target_commit_sha="abc123", + commit_message="Restore commit", + author={"name": "Test", "email": "test@example.com"}, + ) + + assert result is not None + assert result["commit_sha"] == "new-commit-sha" + assert result["ref_update"]["branch"] == "main" + assert result["ref_update"]["new_sha"] == "new-commit-sha" + assert result["ref_update"]["old_sha"] == "old-sha" + + +class TestRepoNoteOperations: + """Tests for git note operations.""" + + @pytest.mark.asyncio + async def test_get_note(self, git_storage_options: dict) -> None: + """Test reading a git note.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + note_response = MagicMock() + note_response.status_code = 200 + note_response.is_success = True + note_response.raise_for_status = MagicMock() + note_response.json.return_value = { + "sha": "abc123", + "note": "hello notes", + "ref_sha": "def456", + } 
+ + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(return_value=create_response) + client_instance.get = AsyncMock(return_value=note_response) + + repo = await storage.create_repo(id="test-repo") + result = await repo.get_note(sha="abc123") + + assert result["sha"] == "abc123" + assert result["note"] == "hello notes" + assert result["ref_sha"] == "def456" + + @pytest.mark.asyncio + async def test_create_append_delete_note(self, git_storage_options: dict) -> None: + """Test creating, appending, and deleting git notes.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + create_note_response = MagicMock() + create_note_response.status_code = 201 + create_note_response.is_success = True + create_note_response.json.return_value = { + "sha": "abc123", + "target_ref": "refs/notes/commits", + "new_ref_sha": "def456", + "result": {"success": True, "status": "ok"}, + } + + append_note_response = MagicMock() + append_note_response.status_code = 200 + append_note_response.is_success = True + append_note_response.json.return_value = { + "sha": "abc123", + "target_ref": "refs/notes/commits", + "new_ref_sha": "ghi789", + "result": {"success": True, "status": "ok"}, + } + + delete_note_response = MagicMock() + delete_note_response.status_code = 200 + delete_note_response.is_success = True + delete_note_response.json.return_value = { + "sha": "abc123", + "target_ref": "refs/notes/commits", + "new_ref_sha": "ghi789", + "result": {"success": True, "status": "ok"}, + } + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock( + side_effect=[create_response, create_note_response, append_note_response] + ) + 
client_instance.request = AsyncMock(return_value=delete_note_response) + + repo = await storage.create_repo(id="test-repo") + + create_result = await repo.create_note(sha="abc123", note="note content") + assert create_result["new_ref_sha"] == "def456" + + append_result = await repo.append_note(sha="abc123", note="note append") + assert append_result["new_ref_sha"] == "ghi789" + + delete_result = await repo.delete_note(sha="abc123") + assert delete_result["target_ref"] == "refs/notes/commits" + + create_call = client_instance.post.call_args_list[1] + assert create_call.kwargs["json"] == { + "sha": "abc123", + "action": "add", + "note": "note content", + } + + append_call = client_instance.post.call_args_list[2] + assert append_call.kwargs["json"] == { + "sha": "abc123", + "action": "append", + "note": "note append", + } + + delete_call = client_instance.request.call_args_list[0] + assert delete_call.args[0] == "DELETE" + assert delete_call.kwargs["json"] == {"sha": "abc123"} + + +class TestRepoDiffOperations: + """Tests for diff operations.""" + + @pytest.mark.asyncio + async def test_get_branch_diff(self, git_storage_options: dict) -> None: + """Test getting branch diff.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + diff_response = MagicMock() + diff_response.status_code = 200 + diff_response.is_success = True + diff_response.json.return_value = { + "branch": "feature", + "base": "main", + "stats": {"additions": 10, "deletions": 5, "files_changed": 2}, + "files": [ + { + "path": "README.md", + "state": "modified", + "raw": "diff --git ...", + "bytes": 100, + "is_eof": True, + }, + { + "path": "new-file.py", + "state": "added", + "raw": "diff --git ...", + "bytes": 200, + "is_eof": True, + }, + ], + "filtered_files": [], + } + + with patch("httpx.AsyncClient") as mock_client: + 
mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=create_response + ) + mock_client.return_value.__aenter__.return_value.get = AsyncMock( + return_value=diff_response + ) + + repo = await storage.create_repo(id="test-repo") + result = await repo.get_branch_diff(branch="feature", base="main") + + assert result is not None + assert "stats" in result + assert result["stats"]["additions"] == 10 + assert len(result["files"]) == 2 + + @pytest.mark.asyncio + async def test_get_branch_diff_with_ephemeral(self, git_storage_options: dict) -> None: + """Test getting branch diff with ephemeral flag.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + diff_response = MagicMock() + diff_response.status_code = 200 + diff_response.is_success = True + diff_response.json.return_value = { + "branch": "feature", + "base": "main", + "stats": {"additions": 5, "deletions": 2, "files_changed": 1}, + "files": [ + { + "path": "test.py", + "state": "modified", + "raw": "diff --git ...", + "bytes": 50, + "is_eof": True, + } + ], + "filtered_files": [], + } + + with patch("httpx.AsyncClient") as mock_client: + mock_get = AsyncMock(return_value=diff_response) + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=create_response + ) + mock_client.return_value.__aenter__.return_value.get = mock_get + + repo = await storage.create_repo(id="test-repo") + result = await repo.get_branch_diff(branch="feature", base="main", ephemeral=True) + + assert result is not None + assert result["stats"]["additions"] == 5 + + # Verify the URL contains the ephemeral parameter + call_args = mock_get.call_args + url = call_args[0][0] + parsed = urlparse(url) + params = parse_qs(parsed.query) + assert params["ephemeral"] == ["true"] + assert params["branch"] == ["feature"] + assert 
params["base"] == ["main"] + + @pytest.mark.asyncio + async def test_get_branch_diff_with_ephemeral_base(self, git_storage_options: dict) -> None: + """Test getting branch diff with ephemeral_base flag.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + diff_response = MagicMock() + diff_response.status_code = 200 + diff_response.is_success = True + diff_response.json.return_value = { + "branch": "feature", + "base": "main", + "stats": {"additions": 8, "deletions": 3, "files_changed": 2}, + "files": [], + "filtered_files": [], + } + + with patch("httpx.AsyncClient") as mock_client: + mock_get = AsyncMock(return_value=diff_response) + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=create_response + ) + mock_client.return_value.__aenter__.return_value.get = mock_get + + repo = await storage.create_repo(id="test-repo") + result = await repo.get_branch_diff(branch="feature", base="main", ephemeral_base=True) + + assert result is not None + assert result["stats"]["additions"] == 8 + + # Verify the URL contains the ephemeral_base parameter + call_args = mock_get.call_args + url = call_args[0][0] + parsed = urlparse(url) + params = parse_qs(parsed.query) + assert params["ephemeral_base"] == ["true"] + assert params["branch"] == ["feature"] + assert params["base"] == ["main"] + + @pytest.mark.asyncio + async def test_get_branch_diff_with_both_ephemeral_flags( + self, git_storage_options: dict + ) -> None: + """Test getting branch diff with both ephemeral and ephemeral_base flags.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + diff_response = MagicMock() + diff_response.status_code = 200 + 
diff_response.is_success = True + diff_response.json.return_value = { + "branch": "feature", + "base": "main", + "stats": {"additions": 12, "deletions": 6, "files_changed": 3}, + "files": [], + "filtered_files": [], + } + + with patch("httpx.AsyncClient") as mock_client: + mock_get = AsyncMock(return_value=diff_response) + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=create_response + ) + mock_client.return_value.__aenter__.return_value.get = mock_get + + repo = await storage.create_repo(id="test-repo") + result = await repo.get_branch_diff( + branch="feature", base="main", ephemeral=True, ephemeral_base=True + ) + + assert result is not None + assert result["stats"]["additions"] == 12 + + # Verify the URL contains both ephemeral parameters + call_args = mock_get.call_args + url = call_args[0][0] + parsed = urlparse(url) + params = parse_qs(parsed.query) + assert params["ephemeral"] == ["true"] + assert params["ephemeral_base"] == ["true"] + assert params["branch"] == ["feature"] + assert params["base"] == ["main"] + + @pytest.mark.asyncio + async def test_get_branch_diff_ephemeral_false(self, git_storage_options: dict) -> None: + """Test getting branch diff with ephemeral explicitly set to False.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + diff_response = MagicMock() + diff_response.status_code = 200 + diff_response.is_success = True + diff_response.json.return_value = { + "branch": "feature", + "base": "main", + "stats": {"additions": 4, "deletions": 1, "files_changed": 1}, + "files": [], + "filtered_files": [], + } + + with patch("httpx.AsyncClient") as mock_client: + mock_get = AsyncMock(return_value=diff_response) + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=create_response + ) + 
mock_client.return_value.__aenter__.return_value.get = mock_get + + repo = await storage.create_repo(id="test-repo") + result = await repo.get_branch_diff(branch="feature", base="main", ephemeral=False) + + assert result is not None + + # Verify the URL contains ephemeral=false + call_args = mock_get.call_args + url = call_args[0][0] + parsed = urlparse(url) + params = parse_qs(parsed.query) + assert params["ephemeral"] == ["false"] + + @pytest.mark.asyncio + async def test_get_commit_diff(self, git_storage_options: dict) -> None: + """Test getting commit diff.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + diff_response = MagicMock() + diff_response.status_code = 200 + diff_response.is_success = True + diff_response.json.return_value = { + "sha": "abc123", + "stats": {"additions": 3, "deletions": 1, "files_changed": 1}, + "files": [ + { + "path": "config.json", + "state": "modified", + "raw": "diff --git a/config.json b/config.json...", + "bytes": 150, + "is_eof": True, + } + ], + "filtered_files": [], + } + + with patch("httpx.AsyncClient") as mock_client: + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=create_response + ) + mock_client.return_value.__aenter__.return_value.get = AsyncMock( + return_value=diff_response + ) + + repo = await storage.create_repo(id="test-repo") + result = await repo.get_commit_diff(sha="abc123") + + assert result is not None + assert "stats" in result + assert result["stats"]["files_changed"] == 1 + assert result["files"][0]["path"] == "config.json" + + @pytest.mark.asyncio + async def test_get_commit_diff_with_base_sha(self, git_storage_options: dict) -> None: + """Test getting commit diff with base_sha parameter.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 
200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + diff_response = MagicMock() + diff_response.status_code = 200 + diff_response.is_success = True + diff_response.json.return_value = { + "sha": "abc123", + "stats": {"additions": 5, "deletions": 2, "files_changed": 2}, + "files": [ + { + "path": "file1.py", + "state": "modified", + "raw": "diff --git ...", + "bytes": 100, + "is_eof": True, + }, + { + "path": "file2.py", + "state": "added", + "raw": "diff --git ...", + "bytes": 50, + "is_eof": True, + }, + ], + "filtered_files": [], + } + + with patch("httpx.AsyncClient") as mock_client: + mock_get = AsyncMock(return_value=diff_response) + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=create_response + ) + mock_client.return_value.__aenter__.return_value.get = mock_get + + repo = await storage.create_repo(id="test-repo") + result = await repo.get_commit_diff(sha="abc123", base_sha="def456") + + assert result is not None + assert result["stats"]["additions"] == 5 + assert len(result["files"]) == 2 + + # Verify the URL contains the baseSha parameter + call_args = mock_get.call_args + url = call_args[0][0] + parsed = urlparse(url) + params = parse_qs(parsed.query) + assert params["sha"] == ["abc123"] + assert params["baseSha"] == ["def456"] + + +class TestRepoUpstreamOperations: + """Tests for upstream operations.""" + + @pytest.mark.asyncio + async def test_pull_upstream(self, git_storage_options: dict) -> None: + """Test pulling from upstream.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + pull_response = MagicMock() + pull_response.status_code = 202 + pull_response.is_success = True + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + 
client_instance.post = AsyncMock(side_effect=[create_response, pull_response]) + + repo = await storage.create_repo(id="test-repo") + # Should not raise an exception + await repo.pull_upstream(ref="main") + + @pytest.mark.asyncio + async def test_restore_commit_json_decode_error(self, git_storage_options: dict) -> None: + """Test restoring commit with non-JSON response (e.g., CDN HTML on 5xx).""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + # Mock a 502 response with HTML instead of JSON + restore_response = MagicMock() + restore_response.status_code = 502 + restore_response.is_success = False + restore_response.reason_phrase = "Bad Gateway" + # Simulate JSON decode error + restore_response.json.side_effect = Exception("JSON decode error") + restore_response.aread = AsyncMock( + return_value=b"502 Bad Gateway" + ) + + with patch("httpx.AsyncClient") as mock_client: + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + side_effect=[create_response, restore_response] + ) + + repo = await storage.create_repo(id="test-repo") + + with pytest.raises(RefUpdateError) as exc_info: + await repo.restore_commit( + target_branch="main", + target_commit_sha="abc123", + commit_message="Restore commit", + author={"name": "Test", "email": "test@example.com"}, + ) + + # Verify we got a RefUpdateError with meaningful message + assert "502" in str(exc_info.value) + assert "Bad Gateway" in str(exc_info.value) + assert exc_info.value.status == "unavailable" # 502 maps to "unavailable" + + @pytest.mark.asyncio + async def test_pull_upstream_no_branch(self, git_storage_options: dict) -> None: + """Test pulling from upstream without specifying branch.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + 
create_response.json.return_value = {"repo_id": "test-repo"} + + pull_response = MagicMock() + pull_response.status_code = 202 + pull_response.is_success = True + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(side_effect=[create_response, pull_response]) + + repo = await storage.create_repo(id="test-repo") + # Should work without branch option + await repo.pull_upstream() + + @pytest.mark.asyncio + async def test_create_commit_from_diff(self, git_storage_options: dict) -> None: + """Test creating a commit directly from a diff.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + stream_response = MagicMock() + stream_response.is_success = True + stream_response.aread = AsyncMock( + return_value=( + b'{"commit":{"commit_sha":"diff123","tree_sha":"tree123","target_branch":"main",' + b'"pack_bytes":512,"blob_count":0},"result":{"success":true,"status":"ok",' + b'"branch":"main","old_sha":"old123","new_sha":"diff123"}}' + ) + ) + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(return_value=create_response) + stream_context = MagicMock() + stream_context.__aenter__ = AsyncMock(return_value=stream_response) + stream_context.__aexit__ = AsyncMock(return_value=None) + client_instance.stream = MagicMock(return_value=stream_context) + + repo = await storage.create_repo(id="test-repo") + result = await repo.create_commit_from_diff( + target_branch="main", + commit_message="Apply diff", + diff="--- a/file.txt\n+++ b/file.txt\n@@\n+hello world\n", + author={"name": "Test", "email": "test@example.com"}, + ) + + assert result["commit_sha"] == "diff123" + assert result["ref_update"]["new_sha"] == "diff123" 
+ + client_instance.stream.assert_called_once() + args, _ = client_instance.stream.call_args + assert args[0] == "POST" + assert args[1].endswith("/api/v1/repos/diff-commit") + + @pytest.mark.asyncio + async def test_create_commit_from_diff_failure(self, git_storage_options: dict) -> None: + """Test diff commit raising RefUpdateError on failure.""" + storage = GitStorage(git_storage_options) + + create_response = MagicMock() + create_response.status_code = 200 + create_response.is_success = True + create_response.json.return_value = {"repo_id": "test-repo"} + + stream_response = MagicMock() + stream_response.is_success = True + stream_response.aread = AsyncMock( + return_value=( + b'{"commit":{"commit_sha":"fail123","tree_sha":"tree123","target_branch":"main",' + b'"pack_bytes":512,"blob_count":0},"result":{"success":false,"status":"rejected",' + b'"message":"conflict detected","branch":"main","old_sha":"old123","new_sha":"fail123"}}' + ) + ) + + with patch("httpx.AsyncClient") as mock_client: + client_instance = mock_client.return_value.__aenter__.return_value + client_instance.post = AsyncMock(return_value=create_response) + stream_context = MagicMock() + stream_context.__aenter__ = AsyncMock(return_value=stream_response) + stream_context.__aexit__ = AsyncMock(return_value=None) + client_instance.stream = MagicMock(return_value=stream_context) + + repo = await storage.create_repo(id="test-repo") + + with pytest.raises(RefUpdateError) as exc_info: + await repo.create_commit_from_diff( + target_branch="main", + commit_message="Apply diff", + diff="@diff-content", + author={"name": "Test", "email": "test@example.com"}, + ) + + assert exc_info.value.status == "rejected" + assert "conflict detected" in str(exc_info.value) + + +class TestCodeStorageAgentHeaderInRepo: + """Tests for Code-Storage-Agent header in repo API requests.""" + + @pytest.mark.asyncio + async def test_list_files_includes_agent_header(self, git_storage_options: dict) -> None: + """Test that 
listFiles includes Code-Storage-Agent header.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.json = MagicMock( + return_value={"repo_id": "test-repo", "url": "https://example.com/repo.git"} + ) + mock_response.status_code = 200 + mock_response.is_success = True + + # Mock list files response + list_files_response = MagicMock() + list_files_response.json = MagicMock(return_value={"paths": [], "ref": "main"}) + list_files_response.status_code = 200 + list_files_response.is_success = True + list_files_response.raise_for_status = MagicMock() + + captured_headers = None + + with patch("httpx.AsyncClient") as mock_client: + mock_get = AsyncMock(return_value=list_files_response) + + async def capture_get(*args, **kwargs): + nonlocal captured_headers + captured_headers = kwargs.get("headers") + return await mock_get(*args, **kwargs) + + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=mock_response + ) + mock_client.return_value.__aenter__.return_value.get = capture_get + + repo = await storage.create_repo(id="test-repo") + await repo.list_files() + + # Verify headers include Code-Storage-Agent + assert captured_headers is not None + assert "Code-Storage-Agent" in captured_headers + assert captured_headers["Code-Storage-Agent"] == get_user_agent() + + @pytest.mark.asyncio + async def test_list_branches_includes_agent_header(self, git_storage_options: dict) -> None: + """Test that listBranches includes Code-Storage-Agent header.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.json = MagicMock( + return_value={"repo_id": "test-repo", "url": "https://example.com/repo.git"} + ) + mock_response.status_code = 200 + mock_response.is_success = True + + # Mock list branches response + list_branches_response = MagicMock() + list_branches_response.json = MagicMock( + return_value={"branches": [], "cursor": None, "has_more": False} + ) + 
list_branches_response.status_code = 200 + list_branches_response.is_success = True + + captured_headers = None + + with patch("httpx.AsyncClient") as mock_client: + mock_get = AsyncMock(return_value=list_branches_response) + + async def capture_get(*args, **kwargs): + nonlocal captured_headers + captured_headers = kwargs.get("headers") + return await mock_get(*args, **kwargs) + + mock_client.return_value.__aenter__.return_value.post = AsyncMock( + return_value=mock_response + ) + mock_client.return_value.__aenter__.return_value.get = capture_get + + repo = await storage.create_repo(id="test-repo") + await repo.list_branches() + + # Verify headers include Code-Storage-Agent + assert captured_headers is not None + assert "Code-Storage-Agent" in captured_headers + assert captured_headers["Code-Storage-Agent"] == get_user_agent() + + @pytest.mark.asyncio + async def test_create_branch_includes_agent_header(self, git_storage_options: dict) -> None: + """Test that createBranch includes Code-Storage-Agent header.""" + storage = GitStorage(git_storage_options) + + mock_response = MagicMock() + mock_response.json = MagicMock( + return_value={"repo_id": "test-repo", "url": "https://example.com/repo.git"} + ) + mock_response.status_code = 200 + mock_response.is_success = True + + # Mock create branch response + create_branch_response = MagicMock() + create_branch_response.json = MagicMock( + return_value={ + "message": "branch created", + "target_branch": "feature/test", + "target_is_ephemeral": False, + } + ) + create_branch_response.status_code = 200 + create_branch_response.is_success = True + + captured_headers = None + + with patch("httpx.AsyncClient") as mock_client: + + async def capture_post(*args, **kwargs): + nonlocal captured_headers + url = args[0] if args else "" + if "branch" not in url: # createRepo call + return mock_response + else: # createBranch call + captured_headers = kwargs.get("headers") + return create_branch_response + + 
mock_client.return_value.__aenter__.return_value.post = capture_post + + repo = await storage.create_repo(id="test-repo") + await repo.create_branch(base_branch="main", target_branch="feature/test") + + # Verify headers include Code-Storage-Agent + assert captured_headers is not None + assert "Code-Storage-Agent" in captured_headers + assert captured_headers["Code-Storage-Agent"] == get_user_agent() diff --git a/packages/git-storage-sdk-python/tests/test_version.py b/packages/git-storage-sdk-python/tests/test_version.py new file mode 100644 index 000000000..7d284a542 --- /dev/null +++ b/packages/git-storage-sdk-python/tests/test_version.py @@ -0,0 +1,48 @@ +"""Tests for version module.""" + +from pierre_storage import __version__ +from pierre_storage.version import PACKAGE_NAME, PACKAGE_VERSION, get_user_agent + + +class TestVersion: + """Tests for version constants and functions.""" + + def test_package_name(self) -> None: + """Test PACKAGE_NAME constant.""" + assert PACKAGE_NAME == "code-storage-py-sdk" + + def test_package_version(self) -> None: + """Test PACKAGE_VERSION constant.""" + assert isinstance(PACKAGE_VERSION, str) + assert PACKAGE_VERSION + + def test_package_version_format(self) -> None: + """Test that version follows semantic versioning.""" + import re + + semver_pattern = r"^\d+\.\d+\.\d+(-[a-zA-Z0-9.-]+)?$" + assert re.match(semver_pattern, PACKAGE_VERSION) + + def test_get_user_agent(self) -> None: + """Test get_user_agent function.""" + user_agent = get_user_agent() + assert user_agent == f"{PACKAGE_NAME}/{PACKAGE_VERSION}" + + def test_get_user_agent_consistency(self) -> None: + """Test that get_user_agent returns consistent value.""" + user_agent1 = get_user_agent() + user_agent2 = get_user_agent() + assert user_agent1 == user_agent2 + + def test_user_agent_format(self) -> None: + """Test that user agent follows expected format.""" + import re + + user_agent = get_user_agent() + # Pattern: name/version + pattern = 
r"^[\w-]+/\d+\.\d+\.\d+(-[a-zA-Z0-9.-]+)?$" + assert re.match(pattern, user_agent) + + def test_init_version_matches_package_version(self) -> None: + """Test that package __version__ tracks PACKAGE_VERSION.""" + assert __version__ == PACKAGE_VERSION diff --git a/packages/git-storage-sdk-python/tests/test_webhook.py b/packages/git-storage-sdk-python/tests/test_webhook.py new file mode 100644 index 000000000..12b06a6ca --- /dev/null +++ b/packages/git-storage-sdk-python/tests/test_webhook.py @@ -0,0 +1,172 @@ +"""Tests for webhook validation.""" + +import hashlib +import hmac +import json +import time + +import pytest + +from pierre_storage.webhook import ( + parse_push_event, + parse_signature_header, + validate_webhook, + validate_webhook_signature, +) + + +class TestWebhookValidation: + """Tests for webhook validation.""" + + def test_parse_signature_header(self) -> None: + """Test parsing signature header.""" + signature = "t=1234567890,sha256=abcdef123456" + parsed = parse_signature_header(signature) + + assert parsed is not None + assert parsed["timestamp"] == "1234567890" + assert parsed["signature"] == "abcdef123456" + + def test_parse_signature_header_invalid(self) -> None: + """Test parsing invalid signature header.""" + assert parse_signature_header("invalid") is None + + def test_validate_webhook_signature(self) -> None: + """Test validating webhook signature.""" + secret = "test-secret" + payload = json.dumps({"test": "data"}) + timestamp = int(time.time()) + + # Create valid signature + signed_payload = f"{timestamp}.{payload}" + signature = hmac.new( + secret.encode("utf-8"), + signed_payload.encode("utf-8"), + hashlib.sha256, + ).hexdigest() + + signature_header = f"t={timestamp},sha256={signature}" + + # Validate + result = validate_webhook_signature( + payload, + signature_header, + secret, + options={"max_age_seconds": 300}, + ) + assert result["valid"] is True + assert result["timestamp"] == timestamp + + def 
test_validate_webhook_signature_invalid(self) -> None: + """Test validating invalid webhook signature.""" + secret = "test-secret" + payload = json.dumps({"test": "data"}) + timestamp = int(time.time()) + + signature_header = f"t={timestamp},sha256=invalid_signature" + + result = validate_webhook_signature( + payload, + signature_header, + secret, + options={"max_age_seconds": 300}, + ) + assert result["valid"] is False + assert result["error"] == "Invalid signature" + assert result["timestamp"] == timestamp + + def test_validate_webhook_signature_expired(self) -> None: + """Test validating expired webhook signature.""" + secret = "test-secret" + payload = json.dumps({"test": "data"}) + timestamp = int(time.time()) - 400 # 400 seconds ago + + # Create valid signature + signed_payload = f"{timestamp}.{payload}" + signature = hmac.new( + secret.encode("utf-8"), + signed_payload.encode("utf-8"), + hashlib.sha256, + ).hexdigest() + + signature_header = f"t={timestamp},sha256={signature}" + + # Validate with 300 second max age + result = validate_webhook_signature( + payload, + signature_header, + secret, + options={"max_age_seconds": 300}, + ) + assert result["valid"] is False + assert "too old" in result["error"] + assert result["timestamp"] == timestamp + + def test_validate_webhook(self) -> None: + """Test full webhook validation.""" + secret = "test-secret" + payload_data = { + "repository": {"id": "repo-123", "url": "https://test.git"}, + "ref": "refs/heads/main", + "before": "abc123", + "after": "def456", + "customer_id": "cust-123", + "pushed_at": "2024-01-15T10:30:00Z", + } + payload = json.dumps(payload_data) + timestamp = int(time.time()) + + # Create valid signature + signed_payload = f"{timestamp}.{payload}" + signature = hmac.new( + secret.encode("utf-8"), + signed_payload.encode("utf-8"), + hashlib.sha256, + ).hexdigest() + + signature_header = f"t={timestamp},sha256={signature}" + + # Validate + headers = { + "X-Pierre-Signature": signature_header, + 
"X-Pierre-Event": "push", + } + result = validate_webhook(payload, headers, secret) + assert result["valid"] is True + assert result["event_type"] == "push" + assert result["timestamp"] == timestamp + assert "error" not in result + assert "payload" in result + push_event = result["payload"] + assert push_event["type"] == "push" + assert push_event["repository"]["id"] == "repo-123" + assert push_event["before"] == "abc123" + + def test_parse_push_event(self) -> None: + """Test parsing push event.""" + payload = { + "repository": {"id": "repo-123", "url": "https://test.git"}, + "ref": "refs/heads/main", + "before": "abc123", + "after": "def456", + "customer_id": "cust-123", + "pushed_at": "2024-01-15T10:30:00Z", + } + + event = parse_push_event(payload) + + assert event["type"] == "push" + assert event["repository"]["id"] == "repo-123" + assert event["ref"] == "refs/heads/main" + assert event["before"] == "abc123" + assert event["after"] == "def456" + assert event["customer_id"] == "cust-123" + assert event["raw_pushed_at"] == "2024-01-15T10:30:00Z" + assert event["pushed_at"].year == 2024 + assert event["pushed_at"].month == 1 + assert event["pushed_at"].day == 15 + + def test_parse_push_event_invalid(self) -> None: + """Test parsing invalid push event.""" + with pytest.raises(ValueError, match="Invalid push event payload"): + parse_push_event({"invalid": "data"}) diff --git a/packages/git-storage-sdk-python/uv.lock b/packages/git-storage-sdk-python/uv.lock new file mode 100644 index 000000000..0cf814d77 --- /dev/null +++ b/packages/git-storage-sdk-python/uv.lock @@ -0,0 +1,1460 @@ +version = 1 +revision = 2 +requires-python = ">=3.9" +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version < '3.10'", +] + +[manifest] +constraints = [{ name = "urllib3", specifier = ">=2.6.3" }] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, +] + +[[package]] +name = "backports-asyncio-runner" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893, upload-time = "2025-07-02T02:27:15.685Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" }, +] + +[[package]] +name = "backports-tarfile" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/86/72/cd9b395f25e290e633655a100af28cb253e4393396264a98bd5f5951d50f/backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991", size = 86406, upload-time = "2024-05-28T17:01:54.731Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/fa/123043af240e49752f1c4bd24da5053b6bd00cad78c2be53c0d1e8b975bc/backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34", size = 30181, upload-time = "2024-05-28T17:01:53.112Z" }, +] + +[[package]] +name = "build" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "os_name == 'nt'" }, + { name = "importlib-metadata", marker = "python_full_version < '3.10.2'" }, + { name = "packaging" }, + { name = "pyproject-hooks" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/25/1c/23e33405a7c9eac261dff640926b8b5adaed6a6eb3e1767d441ed611d0c0/build-1.3.0.tar.gz", hash = "sha256:698edd0ea270bde950f53aed21f3a0135672206f3911e0176261a31e0e07b397", size = 48544, upload-time = "2025-08-01T21:27:09.268Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/8c/2b30c12155ad8de0cf641d76a8b396a16d2c36bc6d50b621a62b7c4567c1/build-1.3.0-py3-none-any.whl", hash = "sha256:7145f0b5061ba90a1500d60bd1b13ca0a8a4cebdd0cc16ed8adf1c0e739f43b4", size = 23382, upload-time = "2025-08-01T21:27:07.844Z" }, +] + 
+[[package]] +name = "certifi" +version = "2025.10.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" }, +] + +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/d7/516d984057745a6cd96575eea814fe1edd6646ee6efd552fb7b0921dec83/cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44", size = 184283, upload-time = "2025-09-08T23:22:08.01Z" }, + { url = "https://files.pythonhosted.org/packages/9e/84/ad6a0b408daa859246f57c03efd28e5dd1b33c21737c2db84cae8c237aa5/cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49", size = 180504, upload-time = "2025-09-08T23:22:10.637Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/bd/b1a6362b80628111e6653c961f987faa55262b4002fcec42308cad1db680/cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c", size = 208811, upload-time = "2025-09-08T23:22:12.267Z" }, + { url = "https://files.pythonhosted.org/packages/4f/27/6933a8b2562d7bd1fb595074cf99cc81fc3789f6a6c05cdabb46284a3188/cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb", size = 216402, upload-time = "2025-09-08T23:22:13.455Z" }, + { url = "https://files.pythonhosted.org/packages/05/eb/b86f2a2645b62adcfff53b0dd97e8dfafb5c8aa864bd0d9a2c2049a0d551/cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0", size = 203217, upload-time = "2025-09-08T23:22:14.596Z" }, + { url = "https://files.pythonhosted.org/packages/9f/e0/6cbe77a53acf5acc7c08cc186c9928864bd7c005f9efd0d126884858a5fe/cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4", size = 203079, upload-time = "2025-09-08T23:22:15.769Z" }, + { url = "https://files.pythonhosted.org/packages/98/29/9b366e70e243eb3d14a5cb488dfd3a0b6b2f1fb001a203f653b93ccfac88/cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453", size = 216475, upload-time = "2025-09-08T23:22:17.427Z" }, + { url = "https://files.pythonhosted.org/packages/21/7a/13b24e70d2f90a322f2900c5d8e1f14fa7e2a6b3332b7309ba7b2ba51a5a/cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495", size = 218829, upload-time = "2025-09-08T23:22:19.069Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/99/c9dc110974c59cc981b1f5b66e1d8af8af764e00f0293266824d9c4254bc/cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5", size = 211211, upload-time = "2025-09-08T23:22:20.588Z" }, + { url = "https://files.pythonhosted.org/packages/49/72/ff2d12dbf21aca1b32a40ed792ee6b40f6dc3a9cf1644bd7ef6e95e0ac5e/cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb", size = 218036, upload-time = "2025-09-08T23:22:22.143Z" }, + { url = "https://files.pythonhosted.org/packages/e2/cc/027d7fb82e58c48ea717149b03bcadcbdc293553edb283af792bd4bcbb3f/cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a", size = 172184, upload-time = "2025-09-08T23:22:23.328Z" }, + { url = "https://files.pythonhosted.org/packages/33/fa/072dd15ae27fbb4e06b437eb6e944e75b068deb09e2a2826039e49ee2045/cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739", size = 182790, upload-time = "2025-09-08T23:22:24.752Z" }, + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, 
upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, + { url = "https://files.pythonhosted.org/packages/c0/cc/08ed5a43f2996a16b462f64a7055c6e962803534924b9b2f1371d8c00b7b/cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf", size = 184288, upload-time = "2025-09-08T23:23:48.404Z" }, + { url = "https://files.pythonhosted.org/packages/3d/de/38d9726324e127f727b4ecc376bc85e505bfe61ef130eaf3f290c6847dd4/cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7", size = 180509, upload-time = "2025-09-08T23:23:49.73Z" }, + { url = "https://files.pythonhosted.org/packages/9b/13/c92e36358fbcc39cf0962e83223c9522154ee8630e1df7c0b3a39a8124e2/cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c", size = 208813, upload-time = "2025-09-08T23:23:51.263Z" }, + { url = "https://files.pythonhosted.org/packages/15/12/a7a79bd0df4c3bff744b2d7e52cc1b68d5e7e427b384252c42366dc1ecbc/cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165", size = 216498, upload-time = "2025-09-08T23:23:52.494Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ad/5c51c1c7600bdd7ed9a24a203ec255dccdd0ebf4527f7b922a0bde2fb6ed/cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534", size = 203243, upload-time = "2025-09-08T23:23:53.836Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/f2/81b63e288295928739d715d00952c8c6034cb6c6a516b17d37e0c8be5600/cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f", size = 203158, upload-time = "2025-09-08T23:23:55.169Z" }, + { url = "https://files.pythonhosted.org/packages/1f/74/cc4096ce66f5939042ae094e2e96f53426a979864aa1f96a621ad128be27/cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63", size = 216548, upload-time = "2025-09-08T23:23:56.506Z" }, + { url = "https://files.pythonhosted.org/packages/e8/be/f6424d1dc46b1091ffcc8964fa7c0ab0cd36839dd2761b49c90481a6ba1b/cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2", size = 218897, upload-time = "2025-09-08T23:23:57.825Z" }, + { url = "https://files.pythonhosted.org/packages/f7/e0/dda537c2309817edf60109e39265f24f24aa7f050767e22c98c53fe7f48b/cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65", size = 211249, upload-time = "2025-09-08T23:23:59.139Z" }, + { url = "https://files.pythonhosted.org/packages/2b/e7/7c769804eb75e4c4b35e658dba01de1640a351a9653c3d49ca89d16ccc91/cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322", size = 218041, upload-time = "2025-09-08T23:24:00.496Z" }, + { url = "https://files.pythonhosted.org/packages/aa/d9/6218d78f920dcd7507fc16a766b5ef8f3b913cc7aa938e7fc80b9978d089/cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a", size = 172138, upload-time = "2025-09-08T23:24:01.7Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/8f/a1e836f82d8e32a97e6b29cc8f641779181ac7363734f12df27db803ebda/cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9", size = 182794, upload-time = "2025-09-08T23:24:02.943Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/b8/6d51fc1d52cbd52cd4ccedd5b5b2f0f6a11bbf6765c782298b0f3e808541/charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d", size = 209709, upload-time = "2025-10-14T04:40:11.385Z" }, + { url = "https://files.pythonhosted.org/packages/5c/af/1f9d7f7faafe2ddfb6f72a2e07a548a629c61ad510fe60f9630309908fef/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8", size = 148814, upload-time = "2025-10-14T04:40:13.135Z" }, + { url = "https://files.pythonhosted.org/packages/79/3d/f2e3ac2bbc056ca0c204298ea4e3d9db9b4afe437812638759db2c976b5f/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad", size = 144467, upload-time = "2025-10-14T04:40:14.728Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/85/1bf997003815e60d57de7bd972c57dc6950446a3e4ccac43bc3070721856/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8", size = 162280, upload-time = "2025-10-14T04:40:16.14Z" }, + { url = "https://files.pythonhosted.org/packages/3e/8e/6aa1952f56b192f54921c436b87f2aaf7c7a7c3d0d1a765547d64fd83c13/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d", size = 159454, upload-time = "2025-10-14T04:40:17.567Z" }, + { url = "https://files.pythonhosted.org/packages/36/3b/60cbd1f8e93aa25d1c669c649b7a655b0b5fb4c571858910ea9332678558/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313", size = 153609, upload-time = "2025-10-14T04:40:19.08Z" }, + { url = "https://files.pythonhosted.org/packages/64/91/6a13396948b8fd3c4b4fd5bc74d045f5637d78c9675585e8e9fbe5636554/charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e", size = 151849, upload-time = "2025-10-14T04:40:20.607Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7a/59482e28b9981d105691e968c544cc0df3b7d6133152fb3dcdc8f135da7a/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93", size = 151586, upload-time = "2025-10-14T04:40:21.719Z" }, + { url = "https://files.pythonhosted.org/packages/92/59/f64ef6a1c4bdd2baf892b04cd78792ed8684fbc48d4c2afe467d96b4df57/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = 
"sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0", size = 145290, upload-time = "2025-10-14T04:40:23.069Z" }, + { url = "https://files.pythonhosted.org/packages/6b/63/3bf9f279ddfa641ffa1962b0db6a57a9c294361cc2f5fcac997049a00e9c/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84", size = 163663, upload-time = "2025-10-14T04:40:24.17Z" }, + { url = "https://files.pythonhosted.org/packages/ed/09/c9e38fc8fa9e0849b172b581fd9803bdf6e694041127933934184e19f8c3/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e", size = 151964, upload-time = "2025-10-14T04:40:25.368Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d1/d28b747e512d0da79d8b6a1ac18b7ab2ecfd81b2944c4c710e166d8dd09c/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db", size = 161064, upload-time = "2025-10-14T04:40:26.806Z" }, + { url = "https://files.pythonhosted.org/packages/bb/9a/31d62b611d901c3b9e5500c36aab0ff5eb442043fb3a1c254200d3d397d9/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6", size = 155015, upload-time = "2025-10-14T04:40:28.284Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f3/107e008fa2bff0c8b9319584174418e5e5285fef32f79d8ee6a430d0039c/charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f", size = 99792, upload-time = "2025-10-14T04:40:29.613Z" }, + { url = "https://files.pythonhosted.org/packages/eb/66/e396e8a408843337d7315bab30dbf106c38966f1819f123257f5520f8a96/charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d", size = 
107198, upload-time = "2025-10-14T04:40:30.644Z" }, + { url = "https://files.pythonhosted.org/packages/b5/58/01b4f815bf0312704c267f2ccb6e5d42bcc7752340cd487bc9f8c3710597/charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69", size = 100262, upload-time = "2025-10-14T04:40:32.108Z" }, + { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" }, + { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" }, + { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" }, + { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" }, + { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" }, + { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" }, + { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" }, + { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" }, + { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, 
upload-time = "2025-10-14T04:40:44.547Z" }, + { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" }, + { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" }, + { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456, upload-time = "2025-10-14T04:40:49.376Z" }, + { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" }, + { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, + { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, + { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, 
+ { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, + { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, + { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, + { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, + { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, + { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, + { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, + { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, + { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, + { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, + { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, + { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, + { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, + { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, + { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, + { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, + { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" }, + { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, + { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, + { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", 
size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, + { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, + { url = "https://files.pythonhosted.org/packages/46/7c/0c4760bccf082737ca7ab84a4c2034fcc06b1f21cf3032ea98bd6feb1725/charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9", size = 209609, upload-time = "2025-10-14T04:42:10.922Z" }, + { url = "https://files.pythonhosted.org/packages/bb/a4/69719daef2f3d7f1819de60c9a6be981b8eeead7542d5ec4440f3c80e111/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d", size = 149029, upload-time = "2025-10-14T04:42:12.38Z" }, + { url = "https://files.pythonhosted.org/packages/e6/21/8d4e1d6c1e6070d3672908b8e4533a71b5b53e71d16828cc24d0efec564c/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608", size = 144580, upload-time = "2025-10-14T04:42:13.549Z" }, + { url = "https://files.pythonhosted.org/packages/a7/0a/a616d001b3f25647a9068e0b9199f697ce507ec898cacb06a0d5a1617c99/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc", size = 162340, upload-time = "2025-10-14T04:42:14.892Z" }, + { url = "https://files.pythonhosted.org/packages/85/93/060b52deb249a5450460e0585c88a904a83aec474ab8e7aba787f45e79f2/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e", size = 159619, upload-time = "2025-10-14T04:42:16.676Z" }, + { url = "https://files.pythonhosted.org/packages/dd/21/0274deb1cc0632cd587a9a0ec6b4674d9108e461cb4cd40d457adaeb0564/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1", size = 153980, upload-time = "2025-10-14T04:42:17.917Z" }, + { url = "https://files.pythonhosted.org/packages/28/2b/e3d7d982858dccc11b31906976323d790dded2017a0572f093ff982d692f/charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3", size = 152174, upload-time = "2025-10-14T04:42:19.018Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ff/4a269f8e35f1e58b2df52c131a1fa019acb7ef3f8697b7d464b07e9b492d/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6", size = 151666, upload-time = "2025-10-14T04:42:20.171Z" }, + { url = "https://files.pythonhosted.org/packages/da/c9/ec39870f0b330d58486001dd8e532c6b9a905f5765f58a6f8204926b4a93/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88", size = 145550, upload-time = "2025-10-14T04:42:21.324Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/8f/d186ab99e40e0ed9f82f033d6e49001701c81244d01905dd4a6924191a30/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1", size = 163721, upload-time = "2025-10-14T04:42:22.46Z" }, + { url = "https://files.pythonhosted.org/packages/96/b1/6047663b9744df26a7e479ac1e77af7134b1fcf9026243bb48ee2d18810f/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf", size = 152127, upload-time = "2025-10-14T04:42:23.712Z" }, + { url = "https://files.pythonhosted.org/packages/59/78/e5a6eac9179f24f704d1be67d08704c3c6ab9f00963963524be27c18ed87/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318", size = 161175, upload-time = "2025-10-14T04:42:24.87Z" }, + { url = "https://files.pythonhosted.org/packages/e5/43/0e626e42d54dd2f8dd6fc5e1c5ff00f05fbca17cb699bedead2cae69c62f/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c", size = 155375, upload-time = "2025-10-14T04:42:27.246Z" }, + { url = "https://files.pythonhosted.org/packages/e9/91/d9615bf2e06f35e4997616ff31248c3657ed649c5ab9d35ea12fce54e380/charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505", size = 99692, upload-time = "2025-10-14T04:42:28.425Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a9/6c040053909d9d1ef4fcab45fddec083aedc9052c10078339b47c8573ea8/charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966", size = 107192, upload-time = "2025-10-14T04:42:29.482Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/c6/4fa536b2c0cd3edfb7ccf8469fa0f363ea67b7213a842b90909ca33dd851/charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50", size = 100220, upload-time = "2025-10-14T04:42:30.632Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.10.7" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/51/26/d22c300112504f5f9a9fd2297ce33c35f3d353e4aeb987c8419453b2a7c2/coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239", size = 827704, upload-time = "2025-09-21T20:03:56.815Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/6c/3a3f7a46888e69d18abe3ccc6fe4cb16cccb1e6a2f99698931dafca489e6/coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a", size = 217987, upload-time = "2025-09-21T20:00:57.218Z" }, + { url = "https://files.pythonhosted.org/packages/03/94/952d30f180b1a916c11a56f5c22d3535e943aa22430e9e3322447e520e1c/coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5", size = 218388, upload-time = "2025-09-21T20:01:00.081Z" }, + { url = "https://files.pythonhosted.org/packages/50/2b/9e0cf8ded1e114bcd8b2fd42792b57f1c4e9e4ea1824cde2af93a67305be/coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:240af60539987ced2c399809bd34f7c78e8abe0736af91c3d7d0e795df633d17", size = 245148, upload-time = "2025-09-21T20:01:01.768Z" }, + { url = "https://files.pythonhosted.org/packages/19/20/d0384ac06a6f908783d9b6aa6135e41b093971499ec488e47279f5b846e6/coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8421e088bc051361b01c4b3a50fd39a4b9133079a2229978d9d30511fd05231b", size = 246958, upload-time = "2025-09-21T20:01:03.355Z" }, + { url = "https://files.pythonhosted.org/packages/60/83/5c283cff3d41285f8eab897651585db908a909c572bdc014bcfaf8a8b6ae/coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6be8ed3039ae7f7ac5ce058c308484787c86e8437e72b30bf5e88b8ea10f3c87", size = 248819, upload-time = "2025-09-21T20:01:04.968Z" }, + { url = "https://files.pythonhosted.org/packages/60/22/02eb98fdc5ff79f423e990d877693e5310ae1eab6cb20ae0b0b9ac45b23b/coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e28299d9f2e889e6d51b1f043f58d5f997c373cc12e6403b90df95b8b047c13e", size = 245754, upload-time = "2025-09-21T20:01:06.321Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/bc/25c83bcf3ad141b32cd7dc45485ef3c01a776ca3aa8ef0a93e77e8b5bc43/coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4e16bd7761c5e454f4efd36f345286d6f7c5fa111623c355691e2755cae3b9e", size = 246860, upload-time = "2025-09-21T20:01:07.605Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b7/95574702888b58c0928a6e982038c596f9c34d52c5e5107f1eef729399b5/coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b1c81d0e5e160651879755c9c675b974276f135558cf4ba79fee7b8413a515df", size = 244877, upload-time = "2025-09-21T20:01:08.829Z" }, + { url = "https://files.pythonhosted.org/packages/47/b6/40095c185f235e085df0e0b158f6bd68cc6e1d80ba6c7721dc81d97ec318/coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:606cc265adc9aaedcc84f1f064f0e8736bc45814f15a357e30fca7ecc01504e0", size = 245108, upload-time = "2025-09-21T20:01:10.527Z" }, + { url = "https://files.pythonhosted.org/packages/c8/50/4aea0556da7a4b93ec9168420d170b55e2eb50ae21b25062513d020c6861/coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:10b24412692df990dbc34f8fb1b6b13d236ace9dfdd68df5b28c2e39cafbba13", size = 245752, upload-time = "2025-09-21T20:01:11.857Z" }, + { url = "https://files.pythonhosted.org/packages/6a/28/ea1a84a60828177ae3b100cb6723838523369a44ec5742313ed7db3da160/coverage-7.10.7-cp310-cp310-win32.whl", hash = "sha256:b51dcd060f18c19290d9b8a9dd1e0181538df2ce0717f562fff6cf74d9fc0b5b", size = 220497, upload-time = "2025-09-21T20:01:13.459Z" }, + { url = "https://files.pythonhosted.org/packages/fc/1a/a81d46bbeb3c3fd97b9602ebaa411e076219a150489bcc2c025f151bd52d/coverage-7.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:3a622ac801b17198020f09af3eaf45666b344a0d69fc2a6ffe2ea83aeef1d807", size = 221392, upload-time = "2025-09-21T20:01:14.722Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/5d/c1a17867b0456f2e9ce2d8d4708a4c3a089947d0bec9c66cdf60c9e7739f/coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59", size = 218102, upload-time = "2025-09-21T20:01:16.089Z" }, + { url = "https://files.pythonhosted.org/packages/54/f0/514dcf4b4e3698b9a9077f084429681bf3aad2b4a72578f89d7f643eb506/coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a", size = 218505, upload-time = "2025-09-21T20:01:17.788Z" }, + { url = "https://files.pythonhosted.org/packages/20/f6/9626b81d17e2a4b25c63ac1b425ff307ecdeef03d67c9a147673ae40dc36/coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699", size = 248898, upload-time = "2025-09-21T20:01:19.488Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ef/bd8e719c2f7417ba03239052e099b76ea1130ac0cbb183ee1fcaa58aaff3/coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d", size = 250831, upload-time = "2025-09-21T20:01:20.817Z" }, + { url = "https://files.pythonhosted.org/packages/a5/b6/bf054de41ec948b151ae2b79a55c107f5760979538f5fb80c195f2517718/coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e", size = 252937, upload-time = "2025-09-21T20:01:22.171Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e5/3860756aa6f9318227443c6ce4ed7bf9e70bb7f1447a0353f45ac5c7974b/coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23", size = 249021, upload-time = 
"2025-09-21T20:01:23.907Z" }, + { url = "https://files.pythonhosted.org/packages/26/0f/bd08bd042854f7fd07b45808927ebcce99a7ed0f2f412d11629883517ac2/coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab", size = 250626, upload-time = "2025-09-21T20:01:25.721Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a7/4777b14de4abcc2e80c6b1d430f5d51eb18ed1d75fca56cbce5f2db9b36e/coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82", size = 248682, upload-time = "2025-09-21T20:01:27.105Z" }, + { url = "https://files.pythonhosted.org/packages/34/72/17d082b00b53cd45679bad682fac058b87f011fd8b9fe31d77f5f8d3a4e4/coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2", size = 248402, upload-time = "2025-09-21T20:01:28.629Z" }, + { url = "https://files.pythonhosted.org/packages/81/7a/92367572eb5bdd6a84bfa278cc7e97db192f9f45b28c94a9ca1a921c3577/coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61", size = 249320, upload-time = "2025-09-21T20:01:30.004Z" }, + { url = "https://files.pythonhosted.org/packages/2f/88/a23cc185f6a805dfc4fdf14a94016835eeb85e22ac3a0e66d5e89acd6462/coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14", size = 220536, upload-time = "2025-09-21T20:01:32.184Z" }, + { url = "https://files.pythonhosted.org/packages/fe/ef/0b510a399dfca17cec7bc2f05ad8bd78cf55f15c8bc9a73ab20c5c913c2e/coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2", size = 221425, upload-time = "2025-09-21T20:01:33.557Z" }, + { url = 
"https://files.pythonhosted.org/packages/51/7f/023657f301a276e4ba1850f82749bc136f5a7e8768060c2e5d9744a22951/coverage-7.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a", size = 220103, upload-time = "2025-09-21T20:01:34.929Z" }, + { url = "https://files.pythonhosted.org/packages/13/e4/eb12450f71b542a53972d19117ea5a5cea1cab3ac9e31b0b5d498df1bd5a/coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417", size = 218290, upload-time = "2025-09-21T20:01:36.455Z" }, + { url = "https://files.pythonhosted.org/packages/37/66/593f9be12fc19fb36711f19a5371af79a718537204d16ea1d36f16bd78d2/coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973", size = 218515, upload-time = "2025-09-21T20:01:37.982Z" }, + { url = "https://files.pythonhosted.org/packages/66/80/4c49f7ae09cafdacc73fbc30949ffe77359635c168f4e9ff33c9ebb07838/coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c", size = 250020, upload-time = "2025-09-21T20:01:39.617Z" }, + { url = "https://files.pythonhosted.org/packages/a6/90/a64aaacab3b37a17aaedd83e8000142561a29eb262cede42d94a67f7556b/coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7", size = 252769, upload-time = "2025-09-21T20:01:41.341Z" }, + { url = "https://files.pythonhosted.org/packages/98/2e/2dda59afd6103b342e096f246ebc5f87a3363b5412609946c120f4e7750d/coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6", size = 253901, upload-time = "2025-09-21T20:01:43.042Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/dc/8d8119c9051d50f3119bb4a75f29f1e4a6ab9415cd1fa8bf22fcc3fb3b5f/coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59", size = 250413, upload-time = "2025-09-21T20:01:44.469Z" }, + { url = "https://files.pythonhosted.org/packages/98/b3/edaff9c5d79ee4d4b6d3fe046f2b1d799850425695b789d491a64225d493/coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b", size = 251820, upload-time = "2025-09-21T20:01:45.915Z" }, + { url = "https://files.pythonhosted.org/packages/11/25/9a0728564bb05863f7e513e5a594fe5ffef091b325437f5430e8cfb0d530/coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a", size = 249941, upload-time = "2025-09-21T20:01:47.296Z" }, + { url = "https://files.pythonhosted.org/packages/e0/fd/ca2650443bfbef5b0e74373aac4df67b08180d2f184b482c41499668e258/coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb", size = 249519, upload-time = "2025-09-21T20:01:48.73Z" }, + { url = "https://files.pythonhosted.org/packages/24/79/f692f125fb4299b6f963b0745124998ebb8e73ecdfce4ceceb06a8c6bec5/coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1", size = 251375, upload-time = "2025-09-21T20:01:50.529Z" }, + { url = "https://files.pythonhosted.org/packages/5e/75/61b9bbd6c7d24d896bfeec57acba78e0f8deac68e6baf2d4804f7aae1f88/coverage-7.10.7-cp312-cp312-win32.whl", hash = "sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256", size = 220699, upload-time = "2025-09-21T20:01:51.941Z" }, + { url = 
"https://files.pythonhosted.org/packages/ca/f3/3bf7905288b45b075918d372498f1cf845b5b579b723c8fd17168018d5f5/coverage-7.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba", size = 221512, upload-time = "2025-09-21T20:01:53.481Z" }, + { url = "https://files.pythonhosted.org/packages/5c/44/3e32dbe933979d05cf2dac5e697c8599cfe038aaf51223ab901e208d5a62/coverage-7.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf", size = 220147, upload-time = "2025-09-21T20:01:55.2Z" }, + { url = "https://files.pythonhosted.org/packages/9a/94/b765c1abcb613d103b64fcf10395f54d69b0ef8be6a0dd9c524384892cc7/coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d", size = 218320, upload-time = "2025-09-21T20:01:56.629Z" }, + { url = "https://files.pythonhosted.org/packages/72/4f/732fff31c119bb73b35236dd333030f32c4bfe909f445b423e6c7594f9a2/coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b", size = 218575, upload-time = "2025-09-21T20:01:58.203Z" }, + { url = "https://files.pythonhosted.org/packages/87/02/ae7e0af4b674be47566707777db1aa375474f02a1d64b9323e5813a6cdd5/coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e", size = 249568, upload-time = "2025-09-21T20:01:59.748Z" }, + { url = "https://files.pythonhosted.org/packages/a2/77/8c6d22bf61921a59bce5471c2f1f7ac30cd4ac50aadde72b8c48d5727902/coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b", size = 252174, upload-time = "2025-09-21T20:02:01.192Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/20/b6ea4f69bbb52dac0aebd62157ba6a9dddbfe664f5af8122dac296c3ee15/coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49", size = 253447, upload-time = "2025-09-21T20:02:02.701Z" }, + { url = "https://files.pythonhosted.org/packages/f9/28/4831523ba483a7f90f7b259d2018fef02cb4d5b90bc7c1505d6e5a84883c/coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911", size = 249779, upload-time = "2025-09-21T20:02:04.185Z" }, + { url = "https://files.pythonhosted.org/packages/a7/9f/4331142bc98c10ca6436d2d620c3e165f31e6c58d43479985afce6f3191c/coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0", size = 251604, upload-time = "2025-09-21T20:02:06.034Z" }, + { url = "https://files.pythonhosted.org/packages/ce/60/bda83b96602036b77ecf34e6393a3836365481b69f7ed7079ab85048202b/coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f", size = 249497, upload-time = "2025-09-21T20:02:07.619Z" }, + { url = "https://files.pythonhosted.org/packages/5f/af/152633ff35b2af63977edd835d8e6430f0caef27d171edf2fc76c270ef31/coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c", size = 249350, upload-time = "2025-09-21T20:02:10.34Z" }, + { url = "https://files.pythonhosted.org/packages/9d/71/d92105d122bd21cebba877228990e1646d862e34a98bb3374d3fece5a794/coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f", size = 251111, upload-time = "2025-09-21T20:02:12.122Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/9e/9fdb08f4bf476c912f0c3ca292e019aab6712c93c9344a1653986c3fd305/coverage-7.10.7-cp313-cp313-win32.whl", hash = "sha256:dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698", size = 220746, upload-time = "2025-09-21T20:02:13.919Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b1/a75fd25df44eab52d1931e89980d1ada46824c7a3210be0d3c88a44aaa99/coverage-7.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843", size = 221541, upload-time = "2025-09-21T20:02:15.57Z" }, + { url = "https://files.pythonhosted.org/packages/14/3a/d720d7c989562a6e9a14b2c9f5f2876bdb38e9367126d118495b89c99c37/coverage-7.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546", size = 220170, upload-time = "2025-09-21T20:02:17.395Z" }, + { url = "https://files.pythonhosted.org/packages/bb/22/e04514bf2a735d8b0add31d2b4ab636fc02370730787c576bb995390d2d5/coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c", size = 219029, upload-time = "2025-09-21T20:02:18.936Z" }, + { url = "https://files.pythonhosted.org/packages/11/0b/91128e099035ece15da3445d9015e4b4153a6059403452d324cbb0a575fa/coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15", size = 219259, upload-time = "2025-09-21T20:02:20.44Z" }, + { url = "https://files.pythonhosted.org/packages/8b/51/66420081e72801536a091a0c8f8c1f88a5c4bf7b9b1bdc6222c7afe6dc9b/coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4", size = 260592, upload-time = "2025-09-21T20:02:22.313Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/22/9b8d458c2881b22df3db5bb3e7369e63d527d986decb6c11a591ba2364f7/coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0", size = 262768, upload-time = "2025-09-21T20:02:24.287Z" }, + { url = "https://files.pythonhosted.org/packages/f7/08/16bee2c433e60913c610ea200b276e8eeef084b0d200bdcff69920bd5828/coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0", size = 264995, upload-time = "2025-09-21T20:02:26.133Z" }, + { url = "https://files.pythonhosted.org/packages/20/9d/e53eb9771d154859b084b90201e5221bca7674ba449a17c101a5031d4054/coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65", size = 259546, upload-time = "2025-09-21T20:02:27.716Z" }, + { url = "https://files.pythonhosted.org/packages/ad/b0/69bc7050f8d4e56a89fb550a1577d5d0d1db2278106f6f626464067b3817/coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541", size = 262544, upload-time = "2025-09-21T20:02:29.216Z" }, + { url = "https://files.pythonhosted.org/packages/ef/4b/2514b060dbd1bc0aaf23b852c14bb5818f244c664cb16517feff6bb3a5ab/coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6", size = 260308, upload-time = "2025-09-21T20:02:31.226Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/7ba2175007c246d75e496f64c06e94122bdb914790a1285d627a918bd271/coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999", size = 258920, upload-time = "2025-09-21T20:02:32.823Z" }, + { url 
= "https://files.pythonhosted.org/packages/c0/b3/fac9f7abbc841409b9a410309d73bfa6cfb2e51c3fada738cb607ce174f8/coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2", size = 261434, upload-time = "2025-09-21T20:02:34.86Z" }, + { url = "https://files.pythonhosted.org/packages/ee/51/a03bec00d37faaa891b3ff7387192cef20f01604e5283a5fabc95346befa/coverage-7.10.7-cp313-cp313t-win32.whl", hash = "sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a", size = 221403, upload-time = "2025-09-21T20:02:37.034Z" }, + { url = "https://files.pythonhosted.org/packages/53/22/3cf25d614e64bf6d8e59c7c669b20d6d940bb337bdee5900b9ca41c820bb/coverage-7.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb", size = 222469, upload-time = "2025-09-21T20:02:39.011Z" }, + { url = "https://files.pythonhosted.org/packages/49/a1/00164f6d30d8a01c3c9c48418a7a5be394de5349b421b9ee019f380df2a0/coverage-7.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb", size = 220731, upload-time = "2025-09-21T20:02:40.939Z" }, + { url = "https://files.pythonhosted.org/packages/23/9c/5844ab4ca6a4dd97a1850e030a15ec7d292b5c5cb93082979225126e35dd/coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b06f260b16ead11643a5a9f955bd4b5fd76c1a4c6796aeade8520095b75de520", size = 218302, upload-time = "2025-09-21T20:02:42.527Z" }, + { url = "https://files.pythonhosted.org/packages/f0/89/673f6514b0961d1f0e20ddc242e9342f6da21eaba3489901b565c0689f34/coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:212f8f2e0612778f09c55dd4872cb1f64a1f2b074393d139278ce902064d5b32", size = 218578, upload-time = "2025-09-21T20:02:44.468Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/e8/261cae479e85232828fb17ad536765c88dd818c8470aca690b0ac6feeaa3/coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3445258bcded7d4aa630ab8296dea4d3f15a255588dd535f980c193ab6b95f3f", size = 249629, upload-time = "2025-09-21T20:02:46.503Z" }, + { url = "https://files.pythonhosted.org/packages/82/62/14ed6546d0207e6eda876434e3e8475a3e9adbe32110ce896c9e0c06bb9a/coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb45474711ba385c46a0bfe696c695a929ae69ac636cda8f532be9e8c93d720a", size = 252162, upload-time = "2025-09-21T20:02:48.689Z" }, + { url = "https://files.pythonhosted.org/packages/ff/49/07f00db9ac6478e4358165a08fb41b469a1b053212e8a00cb02f0d27a05f/coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:813922f35bd800dca9994c5971883cbc0d291128a5de6b167c7aa697fcf59360", size = 253517, upload-time = "2025-09-21T20:02:50.31Z" }, + { url = "https://files.pythonhosted.org/packages/a2/59/c5201c62dbf165dfbc91460f6dbbaa85a8b82cfa6131ac45d6c1bfb52deb/coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:93c1b03552081b2a4423091d6fb3787265b8f86af404cff98d1b5342713bdd69", size = 249632, upload-time = "2025-09-21T20:02:51.971Z" }, + { url = "https://files.pythonhosted.org/packages/07/ae/5920097195291a51fb00b3a70b9bbd2edbfe3c84876a1762bd1ef1565ebc/coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cc87dd1b6eaf0b848eebb1c86469b9f72a1891cb42ac7adcfbce75eadb13dd14", size = 251520, upload-time = "2025-09-21T20:02:53.858Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3c/a815dde77a2981f5743a60b63df31cb322c944843e57dbd579326625a413/coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:39508ffda4f343c35f3236fe8d1a6634a51f4581226a1262769d7f970e73bffe", size = 249455, upload-time = 
"2025-09-21T20:02:55.807Z" }, + { url = "https://files.pythonhosted.org/packages/aa/99/f5cdd8421ea656abefb6c0ce92556709db2265c41e8f9fc6c8ae0f7824c9/coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:925a1edf3d810537c5a3abe78ec5530160c5f9a26b1f4270b40e62cc79304a1e", size = 249287, upload-time = "2025-09-21T20:02:57.784Z" }, + { url = "https://files.pythonhosted.org/packages/c3/7a/e9a2da6a1fc5d007dd51fca083a663ab930a8c4d149c087732a5dbaa0029/coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2c8b9a0636f94c43cd3576811e05b89aa9bc2d0a85137affc544ae5cb0e4bfbd", size = 250946, upload-time = "2025-09-21T20:02:59.431Z" }, + { url = "https://files.pythonhosted.org/packages/ef/5b/0b5799aa30380a949005a353715095d6d1da81927d6dbed5def2200a4e25/coverage-7.10.7-cp314-cp314-win32.whl", hash = "sha256:b7b8288eb7cdd268b0304632da8cb0bb93fadcfec2fe5712f7b9cc8f4d487be2", size = 221009, upload-time = "2025-09-21T20:03:01.324Z" }, + { url = "https://files.pythonhosted.org/packages/da/b0/e802fbb6eb746de006490abc9bb554b708918b6774b722bb3a0e6aa1b7de/coverage-7.10.7-cp314-cp314-win_amd64.whl", hash = "sha256:1ca6db7c8807fb9e755d0379ccc39017ce0a84dcd26d14b5a03b78563776f681", size = 221804, upload-time = "2025-09-21T20:03:03.4Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e8/71d0c8e374e31f39e3389bb0bd19e527d46f00ea8571ec7ec8fd261d8b44/coverage-7.10.7-cp314-cp314-win_arm64.whl", hash = "sha256:097c1591f5af4496226d5783d036bf6fd6cd0cbc132e071b33861de756efb880", size = 220384, upload-time = "2025-09-21T20:03:05.111Z" }, + { url = "https://files.pythonhosted.org/packages/62/09/9a5608d319fa3eba7a2019addeacb8c746fb50872b57a724c9f79f146969/coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:a62c6ef0d50e6de320c270ff91d9dd0a05e7250cac2a800b7784bae474506e63", size = 219047, upload-time = "2025-09-21T20:03:06.795Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/6f/f58d46f33db9f2e3647b2d0764704548c184e6f5e014bef528b7f979ef84/coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9fa6e4dd51fe15d8738708a973470f67a855ca50002294852e9571cdbd9433f2", size = 219266, upload-time = "2025-09-21T20:03:08.495Z" }, + { url = "https://files.pythonhosted.org/packages/74/5c/183ffc817ba68e0b443b8c934c8795553eb0c14573813415bd59941ee165/coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8fb190658865565c549b6b4706856d6a7b09302c797eb2cf8e7fe9dabb043f0d", size = 260767, upload-time = "2025-09-21T20:03:10.172Z" }, + { url = "https://files.pythonhosted.org/packages/0f/48/71a8abe9c1ad7e97548835e3cc1adbf361e743e9d60310c5f75c9e7bf847/coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:affef7c76a9ef259187ef31599a9260330e0335a3011732c4b9effa01e1cd6e0", size = 262931, upload-time = "2025-09-21T20:03:11.861Z" }, + { url = "https://files.pythonhosted.org/packages/84/fd/193a8fb132acfc0a901f72020e54be5e48021e1575bb327d8ee1097a28fd/coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e16e07d85ca0cf8bafe5f5d23a0b850064e8e945d5677492b06bbe6f09cc699", size = 265186, upload-time = "2025-09-21T20:03:13.539Z" }, + { url = "https://files.pythonhosted.org/packages/b1/8f/74ecc30607dd95ad50e3034221113ccb1c6d4e8085cc761134782995daae/coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:03ffc58aacdf65d2a82bbeb1ffe4d01ead4017a21bfd0454983b88ca73af94b9", size = 259470, upload-time = "2025-09-21T20:03:15.584Z" }, + { url = "https://files.pythonhosted.org/packages/0f/55/79ff53a769f20d71b07023ea115c9167c0bb56f281320520cf64c5298a96/coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1b4fd784344d4e52647fd7857b2af5b3fbe6c239b0b5fa63e94eb67320770e0f", size = 262626, upload-time = 
"2025-09-21T20:03:17.673Z" }, + { url = "https://files.pythonhosted.org/packages/88/e2/dac66c140009b61ac3fc13af673a574b00c16efdf04f9b5c740703e953c0/coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:0ebbaddb2c19b71912c6f2518e791aa8b9f054985a0769bdb3a53ebbc765c6a1", size = 260386, upload-time = "2025-09-21T20:03:19.36Z" }, + { url = "https://files.pythonhosted.org/packages/a2/f1/f48f645e3f33bb9ca8a496bc4a9671b52f2f353146233ebd7c1df6160440/coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a2d9a3b260cc1d1dbdb1c582e63ddcf5363426a1a68faa0f5da28d8ee3c722a0", size = 258852, upload-time = "2025-09-21T20:03:21.007Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3b/8442618972c51a7affeead957995cfa8323c0c9bcf8fa5a027421f720ff4/coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a3cc8638b2480865eaa3926d192e64ce6c51e3d29c849e09d5b4ad95efae5399", size = 261534, upload-time = "2025-09-21T20:03:23.12Z" }, + { url = "https://files.pythonhosted.org/packages/b2/dc/101f3fa3a45146db0cb03f5b4376e24c0aac818309da23e2de0c75295a91/coverage-7.10.7-cp314-cp314t-win32.whl", hash = "sha256:67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235", size = 221784, upload-time = "2025-09-21T20:03:24.769Z" }, + { url = "https://files.pythonhosted.org/packages/4c/a1/74c51803fc70a8a40d7346660379e144be772bab4ac7bb6e6b905152345c/coverage-7.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d", size = 222905, upload-time = "2025-09-21T20:03:26.93Z" }, + { url = "https://files.pythonhosted.org/packages/12/65/f116a6d2127df30bcafbceef0302d8a64ba87488bf6f73a6d8eebf060873/coverage-7.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a", size = 220922, upload-time = "2025-09-21T20:03:28.672Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/ad/d1c25053764b4c42eb294aae92ab617d2e4f803397f9c7c8295caa77a260/coverage-7.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fff7b9c3f19957020cac546c70025331113d2e61537f6e2441bc7657913de7d3", size = 217978, upload-time = "2025-09-21T20:03:30.362Z" }, + { url = "https://files.pythonhosted.org/packages/52/2f/b9f9daa39b80ece0b9548bbb723381e29bc664822d9a12c2135f8922c22b/coverage-7.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bc91b314cef27742da486d6839b677b3f2793dfe52b51bbbb7cf736d5c29281c", size = 218370, upload-time = "2025-09-21T20:03:32.147Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6e/30d006c3b469e58449650642383dddf1c8fb63d44fdf92994bfd46570695/coverage-7.10.7-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:567f5c155eda8df1d3d439d40a45a6a5f029b429b06648235f1e7e51b522b396", size = 244802, upload-time = "2025-09-21T20:03:33.919Z" }, + { url = "https://files.pythonhosted.org/packages/b0/49/8a070782ce7e6b94ff6a0b6d7c65ba6bc3091d92a92cef4cd4eb0767965c/coverage-7.10.7-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2af88deffcc8a4d5974cf2d502251bc3b2db8461f0b66d80a449c33757aa9f40", size = 246625, upload-time = "2025-09-21T20:03:36.09Z" }, + { url = "https://files.pythonhosted.org/packages/6a/92/1c1c5a9e8677ce56d42b97bdaca337b2d4d9ebe703d8c174ede52dbabd5f/coverage-7.10.7-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c7315339eae3b24c2d2fa1ed7d7a38654cba34a13ef19fbcb9425da46d3dc594", size = 248399, upload-time = "2025-09-21T20:03:38.342Z" }, + { url = "https://files.pythonhosted.org/packages/c0/54/b140edee7257e815de7426d5d9846b58505dffc29795fff2dfb7f8a1c5a0/coverage-7.10.7-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:912e6ebc7a6e4adfdbb1aec371ad04c68854cd3bf3608b3514e7ff9062931d8a", size = 245142, upload-time = "2025-09-21T20:03:40.591Z" 
}, + { url = "https://files.pythonhosted.org/packages/e4/9e/6d6b8295940b118e8b7083b29226c71f6154f7ff41e9ca431f03de2eac0d/coverage-7.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f49a05acd3dfe1ce9715b657e28d138578bc40126760efb962322c56e9ca344b", size = 246284, upload-time = "2025-09-21T20:03:42.355Z" }, + { url = "https://files.pythonhosted.org/packages/db/e5/5e957ca747d43dbe4d9714358375c7546cb3cb533007b6813fc20fce37ad/coverage-7.10.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cce2109b6219f22ece99db7644b9622f54a4e915dad65660ec435e89a3ea7cc3", size = 244353, upload-time = "2025-09-21T20:03:44.218Z" }, + { url = "https://files.pythonhosted.org/packages/9a/45/540fc5cc92536a1b783b7ef99450bd55a4b3af234aae35a18a339973ce30/coverage-7.10.7-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:f3c887f96407cea3916294046fc7dab611c2552beadbed4ea901cbc6a40cc7a0", size = 244430, upload-time = "2025-09-21T20:03:46.065Z" }, + { url = "https://files.pythonhosted.org/packages/75/0b/8287b2e5b38c8fe15d7e3398849bb58d382aedc0864ea0fa1820e8630491/coverage-7.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:635adb9a4507c9fd2ed65f39693fa31c9a3ee3a8e6dc64df033e8fdf52a7003f", size = 245311, upload-time = "2025-09-21T20:03:48.19Z" }, + { url = "https://files.pythonhosted.org/packages/0c/1d/29724999984740f0c86d03e6420b942439bf5bd7f54d4382cae386a9d1e9/coverage-7.10.7-cp39-cp39-win32.whl", hash = "sha256:5a02d5a850e2979b0a014c412573953995174743a3f7fa4ea5a6e9a3c5617431", size = 220500, upload-time = "2025-09-21T20:03:50.024Z" }, + { url = "https://files.pythonhosted.org/packages/43/11/4b1e6b129943f905ca54c339f343877b55b365ae2558806c1be4f7476ed5/coverage-7.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:c134869d5ffe34547d14e174c866fd8fe2254918cc0a95e99052903bc1543e07", size = 221408, upload-time = "2025-09-21T20:03:51.803Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/16/114df1c291c22cac3b0c127a73e0af5c12ed7bbb6558d310429a0ae24023/coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260", size = 209952, upload-time = "2025-09-21T20:03:53.918Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version < '3.10'" }, +] + +[[package]] +name = "coverage" +version = "7.11.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/1c/38/ee22495420457259d2f3390309505ea98f98a5eed40901cf62196abad006/coverage-7.11.0.tar.gz", hash = "sha256:167bd504ac1ca2af7ff3b81d245dfea0292c5032ebef9d66cc08a7d28c1b8050", size = 811905, upload-time = "2025-10-15T15:15:08.542Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/95/c49df0aceb5507a80b9fe5172d3d39bf23f05be40c23c8d77d556df96cec/coverage-7.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eb53f1e8adeeb2e78962bade0c08bfdc461853c7969706ed901821e009b35e31", size = 215800, upload-time = "2025-10-15T15:12:19.824Z" }, + { url = "https://files.pythonhosted.org/packages/dc/c6/7bb46ce01ed634fff1d7bb53a54049f539971862cc388b304ff3c51b4f66/coverage-7.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9a03ec6cb9f40a5c360f138b88266fd8f58408d71e89f536b4f91d85721d075", size = 216198, upload-time = "2025-10-15T15:12:22.549Z" }, + { url = "https://files.pythonhosted.org/packages/94/b2/75d9d8fbf2900268aca5de29cd0a0fe671b0f69ef88be16767cc3c828b85/coverage-7.11.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0d7f0616c557cbc3d1c2090334eddcbb70e1ae3a40b07222d62b3aa47f608fab", size = 242953, upload-time = "2025-10-15T15:12:24.139Z" }, + { url = 
"https://files.pythonhosted.org/packages/65/ac/acaa984c18f440170525a8743eb4b6c960ace2dbad80dc22056a437fc3c6/coverage-7.11.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e44a86a47bbdf83b0a3ea4d7df5410d6b1a0de984fbd805fa5101f3624b9abe0", size = 244766, upload-time = "2025-10-15T15:12:25.974Z" }, + { url = "https://files.pythonhosted.org/packages/d8/0d/938d0bff76dfa4a6b228c3fc4b3e1c0e2ad4aa6200c141fcda2bd1170227/coverage-7.11.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:596763d2f9a0ee7eec6e643e29660def2eef297e1de0d334c78c08706f1cb785", size = 246625, upload-time = "2025-10-15T15:12:27.387Z" }, + { url = "https://files.pythonhosted.org/packages/38/54/8f5f5e84bfa268df98f46b2cb396b1009734cfb1e5d6adb663d284893b32/coverage-7.11.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ef55537ff511b5e0a43edb4c50a7bf7ba1c3eea20b4f49b1490f1e8e0e42c591", size = 243568, upload-time = "2025-10-15T15:12:28.799Z" }, + { url = "https://files.pythonhosted.org/packages/68/30/8ba337c2877fe3f2e1af0ed7ff4be0c0c4aca44d6f4007040f3ca2255e99/coverage-7.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9cbabd8f4d0d3dc571d77ae5bdbfa6afe5061e679a9d74b6797c48d143307088", size = 244665, upload-time = "2025-10-15T15:12:30.297Z" }, + { url = "https://files.pythonhosted.org/packages/cc/fb/c6f1d6d9a665536b7dde2333346f0cc41dc6a60bd1ffc10cd5c33e7eb000/coverage-7.11.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e24045453384e0ae2a587d562df2a04d852672eb63051d16096d3f08aa4c7c2f", size = 242681, upload-time = "2025-10-15T15:12:32.326Z" }, + { url = "https://files.pythonhosted.org/packages/be/38/1b532319af5f991fa153c20373291dc65c2bf532af7dbcffdeef745c8f79/coverage-7.11.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:7161edd3426c8d19bdccde7d49e6f27f748f3c31cc350c5de7c633fea445d866", size = 242912, upload-time = "2025-10-15T15:12:34.079Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/3d/f39331c60ef6050d2a861dc1b514fa78f85f792820b68e8c04196ad733d6/coverage-7.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d4ed4de17e692ba6415b0587bc7f12bc80915031fc9db46a23ce70fc88c9841", size = 243559, upload-time = "2025-10-15T15:12:35.809Z" }, + { url = "https://files.pythonhosted.org/packages/4b/55/cb7c9df9d0495036ce582a8a2958d50c23cd73f84a23284bc23bd4711a6f/coverage-7.11.0-cp310-cp310-win32.whl", hash = "sha256:765c0bc8fe46f48e341ef737c91c715bd2a53a12792592296a095f0c237e09cf", size = 218266, upload-time = "2025-10-15T15:12:37.429Z" }, + { url = "https://files.pythonhosted.org/packages/68/a8/b79cb275fa7bd0208767f89d57a1b5f6ba830813875738599741b97c2e04/coverage-7.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:24d6f3128f1b2d20d84b24f4074475457faedc3d4613a7e66b5e769939c7d969", size = 219169, upload-time = "2025-10-15T15:12:39.25Z" }, + { url = "https://files.pythonhosted.org/packages/49/3a/ee1074c15c408ddddddb1db7dd904f6b81bc524e01f5a1c5920e13dbde23/coverage-7.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d58ecaa865c5b9fa56e35efc51d1014d4c0d22838815b9fce57a27dd9576847", size = 215912, upload-time = "2025-10-15T15:12:40.665Z" }, + { url = "https://files.pythonhosted.org/packages/70/c4/9f44bebe5cb15f31608597b037d78799cc5f450044465bcd1ae8cb222fe1/coverage-7.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b679e171f1c104a5668550ada700e3c4937110dbdd153b7ef9055c4f1a1ee3cc", size = 216310, upload-time = "2025-10-15T15:12:42.461Z" }, + { url = "https://files.pythonhosted.org/packages/42/01/5e06077cfef92d8af926bdd86b84fb28bf9bc6ad27343d68be9b501d89f2/coverage-7.11.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ca61691ba8c5b6797deb221a0d09d7470364733ea9c69425a640f1f01b7c5bf0", size = 246706, upload-time = "2025-10-15T15:12:44.001Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/b8/7a3f1f33b35cc4a6c37e759137533119560d06c0cc14753d1a803be0cd4a/coverage-7.11.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:aef1747ede4bd8ca9cfc04cc3011516500c6891f1b33a94add3253f6f876b7b7", size = 248634, upload-time = "2025-10-15T15:12:45.768Z" }, + { url = "https://files.pythonhosted.org/packages/7a/41/7f987eb33de386bc4c665ab0bf98d15fcf203369d6aacae74f5dd8ec489a/coverage-7.11.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1839d08406e4cba2953dcc0ffb312252f14d7c4c96919f70167611f4dee2623", size = 250741, upload-time = "2025-10-15T15:12:47.222Z" }, + { url = "https://files.pythonhosted.org/packages/23/c1/a4e0ca6a4e83069fb8216b49b30a7352061ca0cb38654bd2dc96b7b3b7da/coverage-7.11.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e0eb0a2dcc62478eb5b4cbb80b97bdee852d7e280b90e81f11b407d0b81c4287", size = 246837, upload-time = "2025-10-15T15:12:48.904Z" }, + { url = "https://files.pythonhosted.org/packages/5d/03/ced062a17f7c38b4728ff76c3acb40d8465634b20b4833cdb3cc3a74e115/coverage-7.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bc1fbea96343b53f65d5351d8fd3b34fd415a2670d7c300b06d3e14a5af4f552", size = 248429, upload-time = "2025-10-15T15:12:50.73Z" }, + { url = "https://files.pythonhosted.org/packages/97/af/a7c6f194bb8c5a2705ae019036b8fe7f49ea818d638eedb15fdb7bed227c/coverage-7.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:214b622259dd0cf435f10241f1333d32caa64dbc27f8790ab693428a141723de", size = 246490, upload-time = "2025-10-15T15:12:52.646Z" }, + { url = "https://files.pythonhosted.org/packages/ab/c3/aab4df02b04a8fde79068c3c41ad7a622b0ef2b12e1ed154da986a727c3f/coverage-7.11.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:258d9967520cca899695d4eb7ea38be03f06951d6ca2f21fb48b1235f791e601", size = 246208, upload-time = "2025-10-15T15:12:54.586Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/d8/e282ec19cd658238d60ed404f99ef2e45eed52e81b866ab1518c0d4163cf/coverage-7.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cf9e6ff4ca908ca15c157c409d608da77a56a09877b97c889b98fb2c32b6465e", size = 247126, upload-time = "2025-10-15T15:12:56.485Z" }, + { url = "https://files.pythonhosted.org/packages/d1/17/a635fa07fac23adb1a5451ec756216768c2767efaed2e4331710342a3399/coverage-7.11.0-cp311-cp311-win32.whl", hash = "sha256:fcc15fc462707b0680cff6242c48625da7f9a16a28a41bb8fd7a4280920e676c", size = 218314, upload-time = "2025-10-15T15:12:58.365Z" }, + { url = "https://files.pythonhosted.org/packages/2a/29/2ac1dfcdd4ab9a70026edc8d715ece9b4be9a1653075c658ee6f271f394d/coverage-7.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:865965bf955d92790f1facd64fe7ff73551bd2c1e7e6b26443934e9701ba30b9", size = 219203, upload-time = "2025-10-15T15:12:59.902Z" }, + { url = "https://files.pythonhosted.org/packages/03/21/5ce8b3a0133179115af4c041abf2ee652395837cb896614beb8ce8ddcfd9/coverage-7.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:5693e57a065760dcbeb292d60cc4d0231a6d4b6b6f6a3191561e1d5e8820b745", size = 217879, upload-time = "2025-10-15T15:13:01.35Z" }, + { url = "https://files.pythonhosted.org/packages/c4/db/86f6906a7c7edc1a52b2c6682d6dd9be775d73c0dfe2b84f8923dfea5784/coverage-7.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9c49e77811cf9d024b95faf86c3f059b11c0c9be0b0d61bc598f453703bd6fd1", size = 216098, upload-time = "2025-10-15T15:13:02.916Z" }, + { url = "https://files.pythonhosted.org/packages/21/54/e7b26157048c7ba555596aad8569ff903d6cd67867d41b75287323678ede/coverage-7.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a61e37a403a778e2cda2a6a39abcc895f1d984071942a41074b5c7ee31642007", size = 216331, upload-time = "2025-10-15T15:13:04.403Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/19/1ce6bf444f858b83a733171306134a0544eaddf1ca8851ede6540a55b2ad/coverage-7.11.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c79cae102bb3b1801e2ef1511fb50e91ec83a1ce466b2c7c25010d884336de46", size = 247825, upload-time = "2025-10-15T15:13:05.92Z" }, + { url = "https://files.pythonhosted.org/packages/71/0b/d3bcbbc259fcced5fb67c5d78f6e7ee965f49760c14afd931e9e663a83b2/coverage-7.11.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:16ce17ceb5d211f320b62df002fa7016b7442ea0fd260c11cec8ce7730954893", size = 250573, upload-time = "2025-10-15T15:13:07.471Z" }, + { url = "https://files.pythonhosted.org/packages/58/8d/b0ff3641a320abb047258d36ed1c21d16be33beed4152628331a1baf3365/coverage-7.11.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80027673e9d0bd6aef86134b0771845e2da85755cf686e7c7c59566cf5a89115", size = 251706, upload-time = "2025-10-15T15:13:09.4Z" }, + { url = "https://files.pythonhosted.org/packages/59/c8/5a586fe8c7b0458053d9c687f5cff515a74b66c85931f7fe17a1c958b4ac/coverage-7.11.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4d3ffa07a08657306cd2215b0da53761c4d73cb54d9143b9303a6481ec0cd415", size = 248221, upload-time = "2025-10-15T15:13:10.964Z" }, + { url = "https://files.pythonhosted.org/packages/d0/ff/3a25e3132804ba44cfa9a778cdf2b73dbbe63ef4b0945e39602fc896ba52/coverage-7.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a3b6a5f8b2524fd6c1066bc85bfd97e78709bb5e37b5b94911a6506b65f47186", size = 249624, upload-time = "2025-10-15T15:13:12.5Z" }, + { url = "https://files.pythonhosted.org/packages/c5/12/ff10c8ce3895e1b17a73485ea79ebc1896a9e466a9d0f4aef63e0d17b718/coverage-7.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fcc0a4aa589de34bc56e1a80a740ee0f8c47611bdfb28cd1849de60660f3799d", size = 247744, upload-time = 
"2025-10-15T15:13:14.554Z" }, + { url = "https://files.pythonhosted.org/packages/16/02/d500b91f5471b2975947e0629b8980e5e90786fe316b6d7299852c1d793d/coverage-7.11.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:dba82204769d78c3fd31b35c3d5f46e06511936c5019c39f98320e05b08f794d", size = 247325, upload-time = "2025-10-15T15:13:16.438Z" }, + { url = "https://files.pythonhosted.org/packages/77/11/dee0284fbbd9cd64cfce806b827452c6df3f100d9e66188e82dfe771d4af/coverage-7.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:81b335f03ba67309a95210caf3eb43bd6fe75a4e22ba653ef97b4696c56c7ec2", size = 249180, upload-time = "2025-10-15T15:13:17.959Z" }, + { url = "https://files.pythonhosted.org/packages/59/1b/cdf1def928f0a150a057cab03286774e73e29c2395f0d30ce3d9e9f8e697/coverage-7.11.0-cp312-cp312-win32.whl", hash = "sha256:037b2d064c2f8cc8716fe4d39cb705779af3fbf1ba318dc96a1af858888c7bb5", size = 218479, upload-time = "2025-10-15T15:13:19.608Z" }, + { url = "https://files.pythonhosted.org/packages/ff/55/e5884d55e031da9c15b94b90a23beccc9d6beee65e9835cd6da0a79e4f3a/coverage-7.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:d66c0104aec3b75e5fd897e7940188ea1892ca1d0235316bf89286d6a22568c0", size = 219290, upload-time = "2025-10-15T15:13:21.593Z" }, + { url = "https://files.pythonhosted.org/packages/23/a8/faa930cfc71c1d16bc78f9a19bb73700464f9c331d9e547bfbc1dbd3a108/coverage-7.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:d91ebeac603812a09cf6a886ba6e464f3bbb367411904ae3790dfe28311b15ad", size = 217924, upload-time = "2025-10-15T15:13:23.39Z" }, + { url = "https://files.pythonhosted.org/packages/60/7f/85e4dfe65e400645464b25c036a26ac226cf3a69d4a50c3934c532491cdd/coverage-7.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cc3f49e65ea6e0d5d9bd60368684fe52a704d46f9e7fc413918f18d046ec40e1", size = 216129, upload-time = "2025-10-15T15:13:25.371Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/5d/dc5fa98fea3c175caf9d360649cb1aa3715e391ab00dc78c4c66fabd7356/coverage-7.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f39ae2f63f37472c17b4990f794035c9890418b1b8cca75c01193f3c8d3e01be", size = 216380, upload-time = "2025-10-15T15:13:26.976Z" }, + { url = "https://files.pythonhosted.org/packages/b2/f5/3da9cc9596708273385189289c0e4d8197d37a386bdf17619013554b3447/coverage-7.11.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7db53b5cdd2917b6eaadd0b1251cf4e7d96f4a8d24e174bdbdf2f65b5ea7994d", size = 247375, upload-time = "2025-10-15T15:13:28.923Z" }, + { url = "https://files.pythonhosted.org/packages/65/6c/f7f59c342359a235559d2bc76b0c73cfc4bac7d61bb0df210965cb1ecffd/coverage-7.11.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10ad04ac3a122048688387828b4537bc9cf60c0bf4869c1e9989c46e45690b82", size = 249978, upload-time = "2025-10-15T15:13:30.525Z" }, + { url = "https://files.pythonhosted.org/packages/e7/8c/042dede2e23525e863bf1ccd2b92689692a148d8b5fd37c37899ba882645/coverage-7.11.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4036cc9c7983a2b1f2556d574d2eb2154ac6ed55114761685657e38782b23f52", size = 251253, upload-time = "2025-10-15T15:13:32.174Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a9/3c58df67bfa809a7bddd786356d9c5283e45d693edb5f3f55d0986dd905a/coverage-7.11.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ab934dd13b1c5e94b692b1e01bd87e4488cb746e3a50f798cb9464fd128374b", size = 247591, upload-time = "2025-10-15T15:13:34.147Z" }, + { url = "https://files.pythonhosted.org/packages/26/5b/c7f32efd862ee0477a18c41e4761305de6ddd2d49cdeda0c1116227570fd/coverage-7.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59a6e5a265f7cfc05f76e3bb53eca2e0dfe90f05e07e849930fecd6abb8f40b4", size = 249411, upload-time = 
"2025-10-15T15:13:38.425Z" }, + { url = "https://files.pythonhosted.org/packages/76/b5/78cb4f1e86c1611431c990423ec0768122905b03837e1b4c6a6f388a858b/coverage-7.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:df01d6c4c81e15a7c88337b795bb7595a8596e92310266b5072c7e301168efbd", size = 247303, upload-time = "2025-10-15T15:13:40.464Z" }, + { url = "https://files.pythonhosted.org/packages/87/c9/23c753a8641a330f45f221286e707c427e46d0ffd1719b080cedc984ec40/coverage-7.11.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:8c934bd088eed6174210942761e38ee81d28c46de0132ebb1801dbe36a390dcc", size = 247157, upload-time = "2025-10-15T15:13:42.087Z" }, + { url = "https://files.pythonhosted.org/packages/c5/42/6e0cc71dc8a464486e944a4fa0d85bdec031cc2969e98ed41532a98336b9/coverage-7.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a03eaf7ec24078ad64a07f02e30060aaf22b91dedf31a6b24d0d98d2bba7f48", size = 248921, upload-time = "2025-10-15T15:13:43.715Z" }, + { url = "https://files.pythonhosted.org/packages/e8/1c/743c2ef665e6858cccb0f84377dfe3a4c25add51e8c7ef19249be92465b6/coverage-7.11.0-cp313-cp313-win32.whl", hash = "sha256:695340f698a5f56f795b2836abe6fb576e7c53d48cd155ad2f80fd24bc63a040", size = 218526, upload-time = "2025-10-15T15:13:45.336Z" }, + { url = "https://files.pythonhosted.org/packages/ff/d5/226daadfd1bf8ddbccefbd3aa3547d7b960fb48e1bdac124e2dd13a2b71a/coverage-7.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:2727d47fce3ee2bac648528e41455d1b0c46395a087a229deac75e9f88ba5a05", size = 219317, upload-time = "2025-10-15T15:13:47.401Z" }, + { url = "https://files.pythonhosted.org/packages/97/54/47db81dcbe571a48a298f206183ba8a7ba79200a37cd0d9f4788fcd2af4a/coverage-7.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:0efa742f431529699712b92ecdf22de8ff198df41e43aeaaadf69973eb93f17a", size = 217948, upload-time = "2025-10-15T15:13:49.096Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/8b/cb68425420154e7e2a82fd779a8cc01549b6fa83c2ad3679cd6c088ebd07/coverage-7.11.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:587c38849b853b157706407e9ebdca8fd12f45869edb56defbef2daa5fb0812b", size = 216837, upload-time = "2025-10-15T15:13:51.09Z" }, + { url = "https://files.pythonhosted.org/packages/33/55/9d61b5765a025685e14659c8d07037247de6383c0385757544ffe4606475/coverage-7.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b971bdefdd75096163dd4261c74be813c4508477e39ff7b92191dea19f24cd37", size = 217061, upload-time = "2025-10-15T15:13:52.747Z" }, + { url = "https://files.pythonhosted.org/packages/52/85/292459c9186d70dcec6538f06ea251bc968046922497377bf4a1dc9a71de/coverage-7.11.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:269bfe913b7d5be12ab13a95f3a76da23cf147be7fa043933320ba5625f0a8de", size = 258398, upload-time = "2025-10-15T15:13:54.45Z" }, + { url = "https://files.pythonhosted.org/packages/1f/e2/46edd73fb8bf51446c41148d81944c54ed224854812b6ca549be25113ee0/coverage-7.11.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:dadbcce51a10c07b7c72b0ce4a25e4b6dcb0c0372846afb8e5b6307a121eb99f", size = 260574, upload-time = "2025-10-15T15:13:56.145Z" }, + { url = "https://files.pythonhosted.org/packages/07/5e/1df469a19007ff82e2ca8fe509822820a31e251f80ee7344c34f6cd2ec43/coverage-7.11.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ed43fa22c6436f7957df036331f8fe4efa7af132054e1844918866cd228af6c", size = 262797, upload-time = "2025-10-15T15:13:58.635Z" }, + { url = "https://files.pythonhosted.org/packages/f9/50/de216b31a1434b94d9b34a964c09943c6be45069ec704bfc379d8d89a649/coverage-7.11.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9516add7256b6713ec08359b7b05aeff8850c98d357784c7205b2e60aa2513fa", size = 257361, upload-time = 
"2025-10-15T15:14:00.409Z" }, + { url = "https://files.pythonhosted.org/packages/82/1e/3f9f8344a48111e152e0fd495b6fff13cc743e771a6050abf1627a7ba918/coverage-7.11.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb92e47c92fcbcdc692f428da67db33337fa213756f7adb6a011f7b5a7a20740", size = 260349, upload-time = "2025-10-15T15:14:02.188Z" }, + { url = "https://files.pythonhosted.org/packages/65/9b/3f52741f9e7d82124272f3070bbe316006a7de1bad1093f88d59bfc6c548/coverage-7.11.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d06f4fc7acf3cabd6d74941d53329e06bab00a8fe10e4df2714f0b134bfc64ef", size = 258114, upload-time = "2025-10-15T15:14:03.907Z" }, + { url = "https://files.pythonhosted.org/packages/0b/8b/918f0e15f0365d50d3986bbd3338ca01178717ac5678301f3f547b6619e6/coverage-7.11.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:6fbcee1a8f056af07ecd344482f711f563a9eb1c2cad192e87df00338ec3cdb0", size = 256723, upload-time = "2025-10-15T15:14:06.324Z" }, + { url = "https://files.pythonhosted.org/packages/44/9e/7776829f82d3cf630878a7965a7d70cc6ca94f22c7d20ec4944f7148cb46/coverage-7.11.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dbbf012be5f32533a490709ad597ad8a8ff80c582a95adc8d62af664e532f9ca", size = 259238, upload-time = "2025-10-15T15:14:08.002Z" }, + { url = "https://files.pythonhosted.org/packages/9a/b8/49cf253e1e7a3bedb85199b201862dd7ca4859f75b6cf25ffa7298aa0760/coverage-7.11.0-cp313-cp313t-win32.whl", hash = "sha256:cee6291bb4fed184f1c2b663606a115c743df98a537c969c3c64b49989da96c2", size = 219180, upload-time = "2025-10-15T15:14:09.786Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e1/1a541703826be7ae2125a0fb7f821af5729d56bb71e946e7b933cc7a89a4/coverage-7.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a386c1061bf98e7ea4758e4313c0ab5ecf57af341ef0f43a0bf26c2477b5c268", size = 220241, upload-time = "2025-10-15T15:14:11.471Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/d1/5ee0e0a08621140fd418ec4020f595b4d52d7eb429ae6a0c6542b4ba6f14/coverage-7.11.0-cp313-cp313t-win_arm64.whl", hash = "sha256:f9ea02ef40bb83823b2b04964459d281688fe173e20643870bb5d2edf68bc836", size = 218510, upload-time = "2025-10-15T15:14:13.46Z" }, + { url = "https://files.pythonhosted.org/packages/f4/06/e923830c1985ce808e40a3fa3eb46c13350b3224b7da59757d37b6ce12b8/coverage-7.11.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c770885b28fb399aaf2a65bbd1c12bf6f307ffd112d6a76c5231a94276f0c497", size = 216110, upload-time = "2025-10-15T15:14:15.157Z" }, + { url = "https://files.pythonhosted.org/packages/42/82/cdeed03bfead45203fb651ed756dfb5266028f5f939e7f06efac4041dad5/coverage-7.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a3d0e2087dba64c86a6b254f43e12d264b636a39e88c5cc0a01a7c71bcfdab7e", size = 216395, upload-time = "2025-10-15T15:14:16.863Z" }, + { url = "https://files.pythonhosted.org/packages/fc/ba/e1c80caffc3199aa699813f73ff097bc2df7b31642bdbc7493600a8f1de5/coverage-7.11.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:73feb83bb41c32811973b8565f3705caf01d928d972b72042b44e97c71fd70d1", size = 247433, upload-time = "2025-10-15T15:14:18.589Z" }, + { url = "https://files.pythonhosted.org/packages/80/c0/5b259b029694ce0a5bbc1548834c7ba3db41d3efd3474489d7efce4ceb18/coverage-7.11.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c6f31f281012235ad08f9a560976cc2fc9c95c17604ff3ab20120fe480169bca", size = 249970, upload-time = "2025-10-15T15:14:20.307Z" }, + { url = "https://files.pythonhosted.org/packages/8c/86/171b2b5e1aac7e2fd9b43f7158b987dbeb95f06d1fbecad54ad8163ae3e8/coverage-7.11.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9570ad567f880ef675673992222746a124b9595506826b210fbe0ce3f0499cd", size = 251324, upload-time = "2025-10-15T15:14:22.419Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/7e/7e10414d343385b92024af3932a27a1caf75c6e27ee88ba211221ff1a145/coverage-7.11.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8badf70446042553a773547a61fecaa734b55dc738cacf20c56ab04b77425e43", size = 247445, upload-time = "2025-10-15T15:14:24.205Z" }, + { url = "https://files.pythonhosted.org/packages/c4/3b/e4f966b21f5be8c4bf86ad75ae94efa0de4c99c7bbb8114476323102e345/coverage-7.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a09c1211959903a479e389685b7feb8a17f59ec5a4ef9afde7650bd5eabc2777", size = 249324, upload-time = "2025-10-15T15:14:26.234Z" }, + { url = "https://files.pythonhosted.org/packages/00/a2/8479325576dfcd909244d0df215f077f47437ab852ab778cfa2f8bf4d954/coverage-7.11.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:5ef83b107f50db3f9ae40f69e34b3bd9337456c5a7fe3461c7abf8b75dd666a2", size = 247261, upload-time = "2025-10-15T15:14:28.42Z" }, + { url = "https://files.pythonhosted.org/packages/7b/d8/3a9e2db19d94d65771d0f2e21a9ea587d11b831332a73622f901157cc24b/coverage-7.11.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:f91f927a3215b8907e214af77200250bb6aae36eca3f760f89780d13e495388d", size = 247092, upload-time = "2025-10-15T15:14:30.784Z" }, + { url = "https://files.pythonhosted.org/packages/b3/b1/bbca3c472544f9e2ad2d5116b2379732957048be4b93a9c543fcd0207e5f/coverage-7.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cdbcd376716d6b7fbfeedd687a6c4be019c5a5671b35f804ba76a4c0a778cba4", size = 248755, upload-time = "2025-10-15T15:14:32.585Z" }, + { url = "https://files.pythonhosted.org/packages/89/49/638d5a45a6a0f00af53d6b637c87007eb2297042186334e9923a61aa8854/coverage-7.11.0-cp314-cp314-win32.whl", hash = "sha256:bab7ec4bb501743edc63609320aaec8cd9188b396354f482f4de4d40a9d10721", size = 218793, upload-time = "2025-10-15T15:14:34.972Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/cc/b675a51f2d068adb3cdf3799212c662239b0ca27f4691d1fff81b92ea850/coverage-7.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:3d4ba9a449e9364a936a27322b20d32d8b166553bfe63059bd21527e681e2fad", size = 219587, upload-time = "2025-10-15T15:14:37.047Z" }, + { url = "https://files.pythonhosted.org/packages/93/98/5ac886876026de04f00820e5094fe22166b98dcb8b426bf6827aaf67048c/coverage-7.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:ce37f215223af94ef0f75ac68ea096f9f8e8c8ec7d6e8c346ee45c0d363f0479", size = 218168, upload-time = "2025-10-15T15:14:38.861Z" }, + { url = "https://files.pythonhosted.org/packages/14/d1/b4145d35b3e3ecf4d917e97fc8895bcf027d854879ba401d9ff0f533f997/coverage-7.11.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:f413ce6e07e0d0dc9c433228727b619871532674b45165abafe201f200cc215f", size = 216850, upload-time = "2025-10-15T15:14:40.651Z" }, + { url = "https://files.pythonhosted.org/packages/ca/d1/7f645fc2eccd318369a8a9948acc447bb7c1ade2911e31d3c5620544c22b/coverage-7.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:05791e528a18f7072bf5998ba772fe29db4da1234c45c2087866b5ba4dea710e", size = 217071, upload-time = "2025-10-15T15:14:42.755Z" }, + { url = "https://files.pythonhosted.org/packages/54/7d/64d124649db2737ceced1dfcbdcb79898d5868d311730f622f8ecae84250/coverage-7.11.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cacb29f420cfeb9283b803263c3b9a068924474ff19ca126ba9103e1278dfa44", size = 258570, upload-time = "2025-10-15T15:14:44.542Z" }, + { url = "https://files.pythonhosted.org/packages/6c/3f/6f5922f80dc6f2d8b2c6f974835c43f53eb4257a7797727e6ca5b7b2ec1f/coverage-7.11.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314c24e700d7027ae3ab0d95fbf8d53544fca1f20345fd30cd219b737c6e58d3", size = 260738, upload-time = "2025-10-15T15:14:46.436Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/5f/9e883523c4647c860b3812b417a2017e361eca5b635ee658387dc11b13c1/coverage-7.11.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:630d0bd7a293ad2fc8b4b94e5758c8b2536fdf36c05f1681270203e463cbfa9b", size = 262994, upload-time = "2025-10-15T15:14:48.3Z" }, + { url = "https://files.pythonhosted.org/packages/07/bb/43b5a8e94c09c8bf51743ffc65c4c841a4ca5d3ed191d0a6919c379a1b83/coverage-7.11.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e89641f5175d65e2dbb44db15fe4ea48fade5d5bbb9868fdc2b4fce22f4a469d", size = 257282, upload-time = "2025-10-15T15:14:50.236Z" }, + { url = "https://files.pythonhosted.org/packages/aa/e5/0ead8af411411330b928733e1d201384b39251a5f043c1612970310e8283/coverage-7.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c9f08ea03114a637dab06cedb2e914da9dc67fa52c6015c018ff43fdde25b9c2", size = 260430, upload-time = "2025-10-15T15:14:52.413Z" }, + { url = "https://files.pythonhosted.org/packages/ae/66/03dd8bb0ba5b971620dcaac145461950f6d8204953e535d2b20c6b65d729/coverage-7.11.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce9f3bde4e9b031eaf1eb61df95c1401427029ea1bfddb8621c1161dcb0fa02e", size = 258190, upload-time = "2025-10-15T15:14:54.268Z" }, + { url = "https://files.pythonhosted.org/packages/45/ae/28a9cce40bf3174426cb2f7e71ee172d98e7f6446dff936a7ccecee34b14/coverage-7.11.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:e4dc07e95495923d6fd4d6c27bf70769425b71c89053083843fd78f378558996", size = 256658, upload-time = "2025-10-15T15:14:56.436Z" }, + { url = "https://files.pythonhosted.org/packages/5c/7c/3a44234a8599513684bfc8684878fd7b126c2760f79712bb78c56f19efc4/coverage-7.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:424538266794db2861db4922b05d729ade0940ee69dcf0591ce8f69784db0e11", size = 259342, upload-time = "2025-10-15T15:14:58.538Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/e6/0108519cba871af0351725ebdb8660fd7a0fe2ba3850d56d32490c7d9b4b/coverage-7.11.0-cp314-cp314t-win32.whl", hash = "sha256:4c1eeb3fb8eb9e0190bebafd0462936f75717687117339f708f395fe455acc73", size = 219568, upload-time = "2025-10-15T15:15:00.382Z" }, + { url = "https://files.pythonhosted.org/packages/c9/76/44ba876e0942b4e62fdde23ccb029ddb16d19ba1bef081edd00857ba0b16/coverage-7.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b56efee146c98dbf2cf5cffc61b9829d1e94442df4d7398b26892a53992d3547", size = 220687, upload-time = "2025-10-15T15:15:02.322Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0c/0df55ecb20d0d0ed5c322e10a441775e1a3a5d78c60f0c4e1abfe6fcf949/coverage-7.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:b5c2705afa83f49bd91962a4094b6b082f94aef7626365ab3f8f4bd159c5acf3", size = 218711, upload-time = "2025-10-15T15:15:04.575Z" }, + { url = "https://files.pythonhosted.org/packages/5f/04/642c1d8a448ae5ea1369eac8495740a79eb4e581a9fb0cbdce56bbf56da1/coverage-7.11.0-py3-none-any.whl", hash = "sha256:4b7589765348d78fb4e5fb6ea35d07564e387da2fc5efff62e0222971f155f68", size = 207761, upload-time = "2025-10-15T15:15:06.439Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version >= '3.10' and python_full_version <= '3.11'" }, +] + +[[package]] +name = "cryptography" +version = "46.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, + { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, + { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, + { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, + { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, + { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, + { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, + { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, + { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, + { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, + { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, + { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" }, + { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" }, + { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" }, + { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" }, + { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" }, + { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" }, + { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" }, + { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" }, + { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" }, + { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" }, + { url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" }, + { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" }, + { url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, + { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, + { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, + { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, + { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, + { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, + { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, + { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, + { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, + { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, + { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, + { url = "https://files.pythonhosted.org/packages/d9/cd/1a8633802d766a0fa46f382a77e096d7e209e0817892929655fe0586ae32/cryptography-46.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a23582810fedb8c0bc47524558fb6c56aac3fc252cb306072fd2815da2a47c32", size = 3689163, upload-time = "2025-10-15T23:18:13.821Z" }, + { url = "https://files.pythonhosted.org/packages/4c/59/6b26512964ace6480c3e54681a9859c974172fb141c38df11eadd8416947/cryptography-46.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e7aec276d68421f9574040c26e2a7c3771060bc0cff408bae1dcb19d3ab1e63c", size = 3429474, upload-time = "2025-10-15T23:18:15.477Z" }, + { url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132, upload-time = "2025-10-15T23:18:17.056Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" }, + { url = "https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" }, + { url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" }, + { url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" }, +] + +[[package]] +name = "docutils" +version = "0.22.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4a/c0/89fe6215b443b919cb98a5002e107cb5026854ed1ccb6b5833e0768419d1/docutils-0.22.2.tar.gz", hash = "sha256:9fdb771707c8784c8f2728b67cb2c691305933d68137ef95a75db5f4dfbc213d", size = 2289092, upload-time = 
"2025-09-20T17:55:47.994Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/66/dd/f95350e853a4468ec37478414fc04ae2d61dad7a947b3015c3dcc51a09b9/docutils-0.22.2-py3-none-any.whl", hash = "sha256:b0e98d679283fc3bb0ead8a5da7f501baa632654e7056e9c5846842213d674d8", size = 632667, upload-time = "2025-09-20T17:55:43.052Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = 
"h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "id" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/22/11/102da08f88412d875fa2f1a9a469ff7ad4c874b0ca6fed0048fe385bdb3d/id-1.5.0.tar.gz", hash = "sha256:292cb8a49eacbbdbce97244f47a97b4c62540169c976552e497fd57df0734c1d", size = 15237, upload-time = "2024-12-04T19:53:05.575Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9f/cb/18326d2d89ad3b0dd143da971e77afd1e6ca6674f1b1c3df4b6bec6279fc/id-1.5.0-py3-none-any.whl", 
hash = "sha256:f1434e1cef91f2cbb8a4ec64663d5a23b9ed43ef44c4c957d02583d61714c658", size = 13611, upload-time = "2024-12-04T19:53:03.02Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", 
size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "jaraco-classes" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "more-itertools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/c0/ed4a27bc5571b99e3cff68f8a9fa5b56ff7df1c2251cc715a652ddd26402/jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd", size = 11780, upload-time = "2024-03-31T07:27:36.643Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/66/b15ce62552d84bbfcec9a4873ab79d993a1dd4edb922cbfccae192bd5b5f/jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790", size = 6777, upload-time = "2024-03-31T07:27:34.792Z" }, +] + +[[package]] +name = "jaraco-context" +version = "6.0.1" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "backports-tarfile", marker = "python_full_version < '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/ad/f3777b81bf0b6e7bc7514a1656d3e637b2e8e15fab2ce3235730b3e7a4e6/jaraco_context-6.0.1.tar.gz", hash = "sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3", size = 13912, upload-time = "2024-08-20T03:39:27.358Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/db/0c52c4cf5e4bd9f5d7135ec7669a3a767af21b3a308e1ed3674881e52b62/jaraco.context-6.0.1-py3-none-any.whl", hash = "sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4", size = 6825, upload-time = "2024-08-20T03:39:25.966Z" }, +] + +[[package]] +name = "jaraco-functools" +version = "4.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "more-itertools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f7/ed/1aa2d585304ec07262e1a83a9889880701079dde796ac7b1d1826f40c63d/jaraco_functools-4.3.0.tar.gz", hash = "sha256:cfd13ad0dd2c47a3600b439ef72d8615d482cedcff1632930d6f28924d92f294", size = 19755, upload-time = "2025-08-18T20:05:09.91Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b4/09/726f168acad366b11e420df31bf1c702a54d373a83f968d94141a8c3fde0/jaraco_functools-4.3.0-py3-none-any.whl", hash = "sha256:227ff8ed6f7b8f62c56deff101545fa7543cf2c8e7b82a7c2116e672f29c26e8", size = 10408, upload-time = "2025-08-18T20:05:08.69Z" }, +] + +[[package]] +name = "jeepney" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/6f/357efd7602486741aa73ffc0617fb310a29b588ed0fd69c2399acbb85b0c/jeepney-0.9.0.tar.gz", hash = "sha256:cf0e9e845622b81e4a28df94c40345400256ec608d0e55bb8a3feaa9163f5732", size = 106758, upload-time = "2025-02-27T18:51:01.684Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/b2/a3/e137168c9c44d18eff0376253da9f1e9234d0239e0ee230d2fee6cea8e55/jeepney-0.9.0-py3-none-any.whl", hash = "sha256:97e5714520c16fc0a45695e5365a2e11b81ea79bba796e26f9f1d178cb182683", size = 49010, upload-time = "2025-02-27T18:51:00.104Z" }, +] + +[[package]] +name = "keyring" +version = "25.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata", marker = "python_full_version < '3.12'" }, + { name = "jaraco-classes" }, + { name = "jaraco-context" }, + { name = "jaraco-functools" }, + { name = "jeepney", marker = "sys_platform == 'linux'" }, + { name = "pywin32-ctypes", marker = "sys_platform == 'win32'" }, + { name = "secretstorage", version = "3.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10' and sys_platform == 'linux'" }, + { name = "secretstorage", version = "3.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10' and sys_platform == 'linux'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/70/09/d904a6e96f76ff214be59e7aa6ef7190008f52a0ab6689760a98de0bf37d/keyring-25.6.0.tar.gz", hash = "sha256:0b39998aa941431eb3d9b0d4b2460bc773b9df6fed7621c2dfb291a7e0187a66", size = 62750, upload-time = "2024-12-25T15:26:45.782Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d3/32/da7f44bcb1105d3e88a0b74ebdca50c59121d2ddf71c9e34ba47df7f3a56/keyring-25.6.0-py3-none-any.whl", hash = "sha256:552a3f7af126ece7ed5c89753650eec89c7eaae8617d0aa4d9ad2b75111266bd", size = 39085, upload-time = "2024-12-25T15:26:44.377Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "mdurl", marker = "python_full_version < '3.10'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +dependencies = [ + { name = "mdurl", marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", 
hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "more-itertools" +version = "10.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/5d/38b681d3fce7a266dd9ab73c66959406d565b3e85f21d5e66e1181d93721/more_itertools-10.8.0.tar.gz", hash = "sha256:f638ddf8a1a0d134181275fb5d58b086ead7c6a72429ad725c67503f13ba30bd", size = 137431, upload-time = "2025-09-02T15:23:11.018Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/8e/469e5a4a2f5855992e425f3cb33804cc07bf18d48f2db061aec61ce50270/more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b", size = 69667, upload-time = "2025-09-02T15:23:09.635Z" }, +] + +[[package]] +name = "mypy" +version = "1.18.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = "pathspec" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c0/77/8f0d0001ffad290cef2f7f216f96c814866248a0b92a722365ed54648e7e/mypy-1.18.2.tar.gz", hash = "sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b", size = 3448846, upload-time = "2025-09-19T00:11:10.519Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/6f/657961a0743cff32e6c0611b63ff1c1970a0b482ace35b069203bf705187/mypy-1.18.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eab0cf6294dafe397c261a75f96dc2c31bffe3b944faa24db5def4e2b0f77c", size = 12807973, upload-time = "2025-09-19T00:10:35.282Z" }, + { url = "https://files.pythonhosted.org/packages/10/e9/420822d4f661f13ca8900f5fa239b40ee3be8b62b32f3357df9a3045a08b/mypy-1.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a780ca61fc239e4865968ebc5240bb3bf610ef59ac398de9a7421b54e4a207e", 
size = 11896527, upload-time = "2025-09-19T00:10:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/aa/73/a05b2bbaa7005f4642fcfe40fb73f2b4fb6bb44229bd585b5878e9a87ef8/mypy-1.18.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:448acd386266989ef11662ce3c8011fd2a7b632e0ec7d61a98edd8e27472225b", size = 12507004, upload-time = "2025-09-19T00:11:05.411Z" }, + { url = "https://files.pythonhosted.org/packages/4f/01/f6e4b9f0d031c11ccbd6f17da26564f3a0f3c4155af344006434b0a05a9d/mypy-1.18.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f9e171c465ad3901dc652643ee4bffa8e9fef4d7d0eece23b428908c77a76a66", size = 13245947, upload-time = "2025-09-19T00:10:46.923Z" }, + { url = "https://files.pythonhosted.org/packages/d7/97/19727e7499bfa1ae0773d06afd30ac66a58ed7437d940c70548634b24185/mypy-1.18.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:592ec214750bc00741af1f80cbf96b5013d81486b7bb24cb052382c19e40b428", size = 13499217, upload-time = "2025-09-19T00:09:39.472Z" }, + { url = "https://files.pythonhosted.org/packages/9f/4f/90dc8c15c1441bf31cf0f9918bb077e452618708199e530f4cbd5cede6ff/mypy-1.18.2-cp310-cp310-win_amd64.whl", hash = "sha256:7fb95f97199ea11769ebe3638c29b550b5221e997c63b14ef93d2e971606ebed", size = 9766753, upload-time = "2025-09-19T00:10:49.161Z" }, + { url = "https://files.pythonhosted.org/packages/88/87/cafd3ae563f88f94eec33f35ff722d043e09832ea8530ef149ec1efbaf08/mypy-1.18.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:807d9315ab9d464125aa9fcf6d84fde6e1dc67da0b6f80e7405506b8ac72bc7f", size = 12731198, upload-time = "2025-09-19T00:09:44.857Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e0/1e96c3d4266a06d4b0197ace5356d67d937d8358e2ee3ffac71faa843724/mypy-1.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:776bb00de1778caf4db739c6e83919c1d85a448f71979b6a0edd774ea8399341", size = 11817879, upload-time = 
"2025-09-19T00:09:47.131Z" }, + { url = "https://files.pythonhosted.org/packages/72/ef/0c9ba89eb03453e76bdac5a78b08260a848c7bfc5d6603634774d9cd9525/mypy-1.18.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1379451880512ffce14505493bd9fe469e0697543717298242574882cf8cdb8d", size = 12427292, upload-time = "2025-09-19T00:10:22.472Z" }, + { url = "https://files.pythonhosted.org/packages/1a/52/ec4a061dd599eb8179d5411d99775bec2a20542505988f40fc2fee781068/mypy-1.18.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1331eb7fd110d60c24999893320967594ff84c38ac6d19e0a76c5fd809a84c86", size = 13163750, upload-time = "2025-09-19T00:09:51.472Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5f/2cf2ceb3b36372d51568f2208c021870fe7834cf3186b653ac6446511839/mypy-1.18.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ca30b50a51e7ba93b00422e486cbb124f1c56a535e20eff7b2d6ab72b3b2e37", size = 13351827, upload-time = "2025-09-19T00:09:58.311Z" }, + { url = "https://files.pythonhosted.org/packages/c8/7d/2697b930179e7277529eaaec1513f8de622818696857f689e4a5432e5e27/mypy-1.18.2-cp311-cp311-win_amd64.whl", hash = "sha256:664dc726e67fa54e14536f6e1224bcfce1d9e5ac02426d2326e2bb4e081d1ce8", size = 9757983, upload-time = "2025-09-19T00:10:09.071Z" }, + { url = "https://files.pythonhosted.org/packages/07/06/dfdd2bc60c66611dd8335f463818514733bc763e4760dee289dcc33df709/mypy-1.18.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33eca32dd124b29400c31d7cf784e795b050ace0e1f91b8dc035672725617e34", size = 12908273, upload-time = "2025-09-19T00:10:58.321Z" }, + { url = "https://files.pythonhosted.org/packages/81/14/6a9de6d13a122d5608e1a04130724caf9170333ac5a924e10f670687d3eb/mypy-1.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3c47adf30d65e89b2dcd2fa32f3aeb5e94ca970d2c15fcb25e297871c8e4764", size = 11920910, upload-time = "2025-09-19T00:10:20.043Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/a9/b29de53e42f18e8cc547e38daa9dfa132ffdc64f7250e353f5c8cdd44bee/mypy-1.18.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d6c838e831a062f5f29d11c9057c6009f60cb294fea33a98422688181fe2893", size = 12465585, upload-time = "2025-09-19T00:10:33.005Z" }, + { url = "https://files.pythonhosted.org/packages/77/ae/6c3d2c7c61ff21f2bee938c917616c92ebf852f015fb55917fd6e2811db2/mypy-1.18.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01199871b6110a2ce984bde85acd481232d17413868c9807e95c1b0739a58914", size = 13348562, upload-time = "2025-09-19T00:10:11.51Z" }, + { url = "https://files.pythonhosted.org/packages/4d/31/aec68ab3b4aebdf8f36d191b0685d99faa899ab990753ca0fee60fb99511/mypy-1.18.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a2afc0fa0b0e91b4599ddfe0f91e2c26c2b5a5ab263737e998d6817874c5f7c8", size = 13533296, upload-time = "2025-09-19T00:10:06.568Z" }, + { url = "https://files.pythonhosted.org/packages/9f/83/abcb3ad9478fca3ebeb6a5358bb0b22c95ea42b43b7789c7fb1297ca44f4/mypy-1.18.2-cp312-cp312-win_amd64.whl", hash = "sha256:d8068d0afe682c7c4897c0f7ce84ea77f6de953262b12d07038f4d296d547074", size = 9828828, upload-time = "2025-09-19T00:10:28.203Z" }, + { url = "https://files.pythonhosted.org/packages/5f/04/7f462e6fbba87a72bc8097b93f6842499c428a6ff0c81dd46948d175afe8/mypy-1.18.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:07b8b0f580ca6d289e69209ec9d3911b4a26e5abfde32228a288eb79df129fcc", size = 12898728, upload-time = "2025-09-19T00:10:01.33Z" }, + { url = "https://files.pythonhosted.org/packages/99/5b/61ed4efb64f1871b41fd0b82d29a64640f3516078f6c7905b68ab1ad8b13/mypy-1.18.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ed4482847168439651d3feee5833ccedbf6657e964572706a2adb1f7fa4dfe2e", size = 11910758, upload-time = "2025-09-19T00:10:42.607Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/46/d297d4b683cc89a6e4108c4250a6a6b717f5fa96e1a30a7944a6da44da35/mypy-1.18.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3ad2afadd1e9fea5cf99a45a822346971ede8685cc581ed9cd4d42eaf940986", size = 12475342, upload-time = "2025-09-19T00:11:00.371Z" }, + { url = "https://files.pythonhosted.org/packages/83/45/4798f4d00df13eae3bfdf726c9244bcb495ab5bd588c0eed93a2f2dd67f3/mypy-1.18.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a431a6f1ef14cf8c144c6b14793a23ec4eae3db28277c358136e79d7d062f62d", size = 13338709, upload-time = "2025-09-19T00:11:03.358Z" }, + { url = "https://files.pythonhosted.org/packages/d7/09/479f7358d9625172521a87a9271ddd2441e1dab16a09708f056e97007207/mypy-1.18.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7ab28cc197f1dd77a67e1c6f35cd1f8e8b73ed2217e4fc005f9e6a504e46e7ba", size = 13529806, upload-time = "2025-09-19T00:10:26.073Z" }, + { url = "https://files.pythonhosted.org/packages/71/cf/ac0f2c7e9d0ea3c75cd99dff7aec1c9df4a1376537cb90e4c882267ee7e9/mypy-1.18.2-cp313-cp313-win_amd64.whl", hash = "sha256:0e2785a84b34a72ba55fb5daf079a1003a34c05b22238da94fcae2bbe46f3544", size = 9833262, upload-time = "2025-09-19T00:10:40.035Z" }, + { url = "https://files.pythonhosted.org/packages/5a/0c/7d5300883da16f0063ae53996358758b2a2df2a09c72a5061fa79a1f5006/mypy-1.18.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:62f0e1e988ad41c2a110edde6c398383a889d95b36b3e60bcf155f5164c4fdce", size = 12893775, upload-time = "2025-09-19T00:10:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/50/df/2cffbf25737bdb236f60c973edf62e3e7b4ee1c25b6878629e88e2cde967/mypy-1.18.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8795a039bab805ff0c1dfdb8cd3344642c2b99b8e439d057aba30850b8d3423d", size = 11936852, upload-time = "2025-09-19T00:10:51.631Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/50/34059de13dd269227fb4a03be1faee6e2a4b04a2051c82ac0a0b5a773c9a/mypy-1.18.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ca1e64b24a700ab5ce10133f7ccd956a04715463d30498e64ea8715236f9c9c", size = 12480242, upload-time = "2025-09-19T00:11:07.955Z" }, + { url = "https://files.pythonhosted.org/packages/5b/11/040983fad5132d85914c874a2836252bbc57832065548885b5bb5b0d4359/mypy-1.18.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924eef3795cc89fecf6bedc6ed32b33ac13e8321344f6ddbf8ee89f706c05cb", size = 13326683, upload-time = "2025-09-19T00:09:55.572Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ba/89b2901dd77414dd7a8c8729985832a5735053be15b744c18e4586e506ef/mypy-1.18.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20c02215a080e3a2be3aa50506c67242df1c151eaba0dcbc1e4e557922a26075", size = 13514749, upload-time = "2025-09-19T00:10:44.827Z" }, + { url = "https://files.pythonhosted.org/packages/25/bc/cc98767cffd6b2928ba680f3e5bc969c4152bf7c2d83f92f5a504b92b0eb/mypy-1.18.2-cp314-cp314-win_amd64.whl", hash = "sha256:749b5f83198f1ca64345603118a6f01a4e99ad4bf9d103ddc5a3200cc4614adf", size = 9982959, upload-time = "2025-09-19T00:10:37.344Z" }, + { url = "https://files.pythonhosted.org/packages/3f/a6/490ff491d8ecddf8ab91762d4f67635040202f76a44171420bcbe38ceee5/mypy-1.18.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:25a9c8fb67b00599f839cf472713f54249a62efd53a54b565eb61956a7e3296b", size = 12807230, upload-time = "2025-09-19T00:09:49.471Z" }, + { url = "https://files.pythonhosted.org/packages/eb/2e/60076fc829645d167ece9e80db9e8375648d210dab44cc98beb5b322a826/mypy-1.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2b9c7e284ee20e7598d6f42e13ca40b4928e6957ed6813d1ab6348aa3f47133", size = 11895666, upload-time = "2025-09-19T00:10:53.678Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/4a/1e2880a2a5dda4dc8d9ecd1a7e7606bc0b0e14813637eeda40c38624e037/mypy-1.18.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d6985ed057513e344e43a26cc1cd815c7a94602fb6a3130a34798625bc2f07b6", size = 12499608, upload-time = "2025-09-19T00:09:36.204Z" }, + { url = "https://files.pythonhosted.org/packages/00/81/a117f1b73a3015b076b20246b1f341c34a578ebd9662848c6b80ad5c4138/mypy-1.18.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22f27105f1525ec024b5c630c0b9f36d5c1cc4d447d61fe51ff4bd60633f47ac", size = 13244551, upload-time = "2025-09-19T00:10:17.531Z" }, + { url = "https://files.pythonhosted.org/packages/9b/61/b9f48e1714ce87c7bf0358eb93f60663740ebb08f9ea886ffc670cea7933/mypy-1.18.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:030c52d0ea8144e721e49b1f68391e39553d7451f0c3f8a7565b59e19fcb608b", size = 13491552, upload-time = "2025-09-19T00:10:13.753Z" }, + { url = "https://files.pythonhosted.org/packages/c9/66/b2c0af3b684fa80d1b27501a8bdd3d2daa467ea3992a8aa612f5ca17c2db/mypy-1.18.2-cp39-cp39-win_amd64.whl", hash = "sha256:aa5e07ac1a60a253445797e42b8b2963c9675563a94f11291ab40718b016a7a0", size = 9765635, upload-time = "2025-09-19T00:10:30.993Z" }, + { url = "https://files.pythonhosted.org/packages/87/e3/be76d87158ebafa0309946c4a73831974d4d6ab4f4ef40c3b53a385a66fd/mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e", size = 2352367, upload-time = "2025-09-19T00:10:15.489Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = 
"2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "nh3" +version = "0.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/a6/c6e942fc8dcadab08645f57a6d01d63e97114a30ded5f269dc58e05d4741/nh3-0.3.1.tar.gz", hash = "sha256:6a854480058683d60bdc7f0456105092dae17bef1f300642856d74bd4201da93", size = 18590, upload-time = "2025-10-07T03:27:58.217Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/24/4becaa61e066ff694c37627f5ef7528901115ffa17f7a6693c40da52accd/nh3-0.3.1-cp313-cp313t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:80dc7563a2a3b980e44b221f69848e3645bbf163ab53e3d1add4f47b26120355", size = 1420887, upload-time = "2025-10-07T03:27:25.654Z" }, + { url = "https://files.pythonhosted.org/packages/94/49/16a6ec9098bb9bdf0fb9f09d6464865a3a48858d8d96e779a998ec3bdce0/nh3-0.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f600ad86114df21efc4a3592faa6b1d099c0eebc7e018efebb1c133376097da", size = 791700, upload-time = "2025-10-07T03:27:27.041Z" }, + { url = "https://files.pythonhosted.org/packages/1d/cc/1c024d7c23ad031dfe82ad59581736abcc403b006abb0d2785bffa768b54/nh3-0.3.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:669a908706cd28203d9cfce2f567575686e364a1bc6074d413d88d456066f743", size = 830225, upload-time = "2025-10-07T03:27:28.315Z" }, + { url = "https://files.pythonhosted.org/packages/89/08/4a87f9212373bd77bba01c1fd515220e0d263316f448d9c8e4b09732a645/nh3-0.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a5721f59afa0ab3dcaa0d47e58af33a5fcd254882e1900ee4a8968692a40f79d", size = 
999112, upload-time = "2025-10-07T03:27:29.782Z" }, + { url = "https://files.pythonhosted.org/packages/19/cf/94783911eb966881a440ba9641944c27152662a253c917a794a368b92a3c/nh3-0.3.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2cb6d9e192fbe0d451c7cb1350dadedbeae286207dbf101a28210193d019752e", size = 1070424, upload-time = "2025-10-07T03:27:31.2Z" }, + { url = "https://files.pythonhosted.org/packages/71/44/efb57b44e86a3de528561b49ed53803e5d42cd0441dcfd29b89422160266/nh3-0.3.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:474b176124c1b495ccfa1c20f61b7eb83ead5ecccb79ab29f602c148e8378489", size = 996129, upload-time = "2025-10-07T03:27:32.595Z" }, + { url = "https://files.pythonhosted.org/packages/ee/d3/87c39ea076510e57ee99a27fa4c2335e9e5738172b3963ee7c744a32726c/nh3-0.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4a2434668f4eef4eab17c128e565ce6bea42113ce10c40b928e42c578d401800", size = 980310, upload-time = "2025-10-07T03:27:34.282Z" }, + { url = "https://files.pythonhosted.org/packages/bc/30/00cfbd2a4d268e8d3bda9d1542ba4f7a20fbed37ad1e8e51beeee3f6fdae/nh3-0.3.1-cp313-cp313t-win32.whl", hash = "sha256:0f454ba4c6aabafcaae964ae6f0a96cecef970216a57335fabd229a265fbe007", size = 584439, upload-time = "2025-10-07T03:27:36.103Z" }, + { url = "https://files.pythonhosted.org/packages/80/fa/39d27a62a2f39eb88c2bd50d9fee365a3645e456f3ec483c945a49c74f47/nh3-0.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:22b9e9c9eda497b02b7273b79f7d29e1f1170d2b741624c1b8c566aef28b1f48", size = 592388, upload-time = "2025-10-07T03:27:37.075Z" }, + { url = "https://files.pythonhosted.org/packages/7c/39/7df1c4ee13ef65ee06255df8101141793e97b4326e8509afbce5deada2b5/nh3-0.3.1-cp313-cp313t-win_arm64.whl", hash = "sha256:42e426f36e167ed29669b77ae3c4b9e185e4a1b130a86d7c3249194738a1d7b2", size = 579337, upload-time = "2025-10-07T03:27:38.055Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/28/a387fed70438d2810c8ac866e7b24bf1a5b6f30ae65316dfe4de191afa52/nh3-0.3.1-cp38-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:1de5c1a35bed19a1b1286bab3c3abfe42e990a8a6c4ce9bb9ab4bde49107ea3b", size = 1433666, upload-time = "2025-10-07T03:27:39.118Z" }, + { url = "https://files.pythonhosted.org/packages/c7/f9/500310c1f19cc80770a81aac3c94a0c6b4acdd46489e34019173b2b15a50/nh3-0.3.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaba26591867f697cffdbc539faddeb1d75a36273f5bfe957eb421d3f87d7da1", size = 819897, upload-time = "2025-10-07T03:27:40.488Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d4/ebb0965d767cba943793fa8f7b59d7f141bd322c86387a5e9485ad49754a/nh3-0.3.1-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:489ca5ecd58555c2865701e65f614b17555179e71ecc76d483b6f3886b813a9b", size = 803562, upload-time = "2025-10-07T03:27:41.86Z" }, + { url = "https://files.pythonhosted.org/packages/0a/9c/df037a13f0513283ecee1cf99f723b18e5f87f20e480582466b1f8e3a7db/nh3-0.3.1-cp38-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5a25662b392b06f251da6004a1f8a828dca7f429cd94ac07d8a98ba94d644438", size = 1050854, upload-time = "2025-10-07T03:27:43.29Z" }, + { url = "https://files.pythonhosted.org/packages/d0/9d/488fce56029de430e30380ec21f29cfaddaf0774f63b6aa2bf094c8b4c27/nh3-0.3.1-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38b4872499ab15b17c5c6e9f091143d070d75ddad4a4d1ce388d043ca556629c", size = 1002152, upload-time = "2025-10-07T03:27:44.358Z" }, + { url = "https://files.pythonhosted.org/packages/da/4a/24b0118de34d34093bf03acdeca3a9556f8631d4028814a72b9cc5216382/nh3-0.3.1-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48425995d37880281b467f7cf2b3218c1f4750c55bcb1ff4f47f2320a2bb159c", size = 912333, upload-time = "2025-10-07T03:27:45.757Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/0e/16b3886858b3953ef836dea25b951f3ab0c5b5a431da03f675c0e999afb8/nh3-0.3.1-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94292dd1bd2a2e142fa5bb94c0ee1d84433a5d9034640710132da7e0376fca3a", size = 796945, upload-time = "2025-10-07T03:27:47.169Z" }, + { url = "https://files.pythonhosted.org/packages/87/bb/aac139cf6796f2e0fec026b07843cea36099864ec104f865e2d802a25a30/nh3-0.3.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dd6d1be301123a9af3263739726eeeb208197e5e78fc4f522408c50de77a5354", size = 837257, upload-time = "2025-10-07T03:27:48.243Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d7/1d770876a288a3f5369fd6c816363a5f9d3a071dba24889458fdeb4f7a49/nh3-0.3.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b74bbd047b361c0f21d827250c865ff0895684d9fcf85ea86131a78cfa0b835b", size = 1004142, upload-time = "2025-10-07T03:27:49.278Z" }, + { url = "https://files.pythonhosted.org/packages/31/2a/c4259e8b94c2f4ba10a7560e0889a6b7d2f70dce7f3e93f6153716aaae47/nh3-0.3.1-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:b222c05ae5139320da6caa1c5aed36dd0ee36e39831541d9b56e048a63b4d701", size = 1075896, upload-time = "2025-10-07T03:27:50.527Z" }, + { url = "https://files.pythonhosted.org/packages/59/06/b15ba9fea4773741acb3382dcf982f81e55f6053e8a6e72a97ac91928b1d/nh3-0.3.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:b0d6c834d3c07366ecbdcecc1f4804c5ce0a77fa52ee4653a2a26d2d909980ea", size = 1003235, upload-time = "2025-10-07T03:27:51.673Z" }, + { url = "https://files.pythonhosted.org/packages/1d/13/74707f99221bbe0392d18611b51125d45f8bd5c6be077ef85575eb7a38b1/nh3-0.3.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:670f18b09f75c86c3865f79543bf5acd4bbe2a5a4475672eef2399dd8cdb69d2", size = 987308, upload-time = "2025-10-07T03:27:53.003Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/81/24bf41a5ce7648d7e954de40391bb1bcc4b7731214238c7138c2420f962c/nh3-0.3.1-cp38-abi3-win32.whl", hash = "sha256:d7431b2a39431017f19cd03144005b6c014201b3e73927c05eab6ca37bb1d98c", size = 591695, upload-time = "2025-10-07T03:27:54.43Z" }, + { url = "https://files.pythonhosted.org/packages/a5/ca/263eb96b6d32c61a92c1e5480b7f599b60db7d7fbbc0d944be7532d0ac42/nh3-0.3.1-cp38-abi3-win_amd64.whl", hash = "sha256:c0acef923a1c3a2df3ee5825ea79c149b6748c6449781c53ab6923dc75e87d26", size = 600564, upload-time = "2025-10-07T03:27:55.966Z" }, + { url = "https://files.pythonhosted.org/packages/34/67/d5e07efd38194f52b59b8af25a029b46c0643e9af68204ee263022924c27/nh3-0.3.1-cp38-abi3-win_arm64.whl", hash = "sha256:a3e810a92fb192373204456cac2834694440af73d749565b4348e30235da7f0b", size = 586369, upload-time = "2025-10-07T03:27:57.234Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + +[[package]] +name = "pierre-storage" +version = "0.7.1" +source = { editable = "." } +dependencies = [ + { name = "cryptography" }, + { name = "httpx" }, + { name = "pydantic" }, + { name = "pyjwt" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, +] + +[package.optional-dependencies] +dev = [ + { name = "build" }, + { name = "mypy" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-cov" }, + { name = "ruff" }, + { name = "twine" }, +] + +[package.dev-dependencies] +dev = [ + { name = "build" }, + { name = "mypy" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-cov" }, + { name = "ruff" }, + { name = "twine" }, +] + +[package.metadata] +requires-dist = [ + { name = "build", marker = "extra == 'dev'", specifier = ">=1.0.0" }, + { name = "cryptography", specifier = ">=41.0.0" }, + { name = "httpx", specifier = ">=0.27.0" }, + { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.5.0" }, + { name = "pydantic", specifier = ">=2.0.0" }, + { name = "pyjwt", specifier = ">=2.8.0" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=7.4.0" }, + { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.21.0" }, + { name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=4.1.0" }, + { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.1.0" }, + { name = "twine", marker = "extra == 'dev'", specifier = ">=4.0.0" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'", specifier = ">=4.5.0" }, +] +provides-extras = ["dev"] + +[package.metadata.requires-dev] +dev = [ + { name = "build", specifier = ">=1.0.0" }, + { name = "mypy", specifier = ">=1.5.0" 
}, + { name = "pytest", specifier = ">=7.4.0" }, + { name = "pytest-asyncio", specifier = ">=0.21.0" }, + { name = "pytest-cov", specifier = ">=4.1.0" }, + { name = "ruff", specifier = ">=0.1.0" }, + { name = "twine", specifier = ">=4.0.0" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pycparser" +version = "2.23" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/f3/1e/4f0a3233767010308f2fd6bd0814597e3f63f1dc98304a9112b8759df4ff/pydantic-2.12.3.tar.gz", hash = "sha256:1da1c82b0fc140bb0103bc1441ffe062154c8d38491189751ee00fd8ca65ce74", size = 819383, upload-time = "2025-10-17T15:04:21.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/6b/83661fa77dcefa195ad5f8cd9af3d1a7450fd57cc883ad04d65446ac2029/pydantic-2.12.3-py3-none-any.whl", hash = "sha256:6986454a854bc3bc6e5443e1369e06a3a456af9d339eda45510f517d9ea5c6bf", size = 462431, upload-time = "2025-10-17T15:04:19.346Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/18/d0944e8eaaa3efd0a91b0f1fc537d3be55ad35091b6a87638211ba691964/pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5", size = 457557, upload-time = "2025-10-14T10:23:47.909Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/3d/9b8ca77b0f76fcdbf8bc6b72474e264283f461284ca84ac3fde570c6c49a/pydantic_core-2.41.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2442d9a4d38f3411f22eb9dd0912b7cbf4b7d5b6c92c4173b75d3e1ccd84e36e", size = 2111197, upload-time = "2025-10-14T10:19:43.303Z" }, + { url = "https://files.pythonhosted.org/packages/59/92/b7b0fe6ed4781642232755cb7e56a86e2041e1292f16d9ae410a0ccee5ac/pydantic_core-2.41.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:30a9876226dda131a741afeab2702e2d127209bde3c65a2b8133f428bc5d006b", size = 1917909, upload-time = "2025-10-14T10:19:45.194Z" }, + { url = "https://files.pythonhosted.org/packages/52/8c/3eb872009274ffa4fb6a9585114e161aa1a0915af2896e2d441642929fe4/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d55bbac04711e2980645af68b97d445cdbcce70e5216de444a6c4b6943ebcccd", size = 1969905, 
upload-time = "2025-10-14T10:19:46.567Z" }, + { url = "https://files.pythonhosted.org/packages/f4/21/35adf4a753bcfaea22d925214a0c5b880792e3244731b3f3e6fec0d124f7/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1d778fb7849a42d0ee5927ab0f7453bf9f85eef8887a546ec87db5ddb178945", size = 2051938, upload-time = "2025-10-14T10:19:48.237Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d0/cdf7d126825e36d6e3f1eccf257da8954452934ede275a8f390eac775e89/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b65077a4693a98b90ec5ad8f203ad65802a1b9b6d4a7e48066925a7e1606706", size = 2250710, upload-time = "2025-10-14T10:19:49.619Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1c/af1e6fd5ea596327308f9c8d1654e1285cc3d8de0d584a3c9d7705bf8a7c/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62637c769dee16eddb7686bf421be48dfc2fae93832c25e25bc7242e698361ba", size = 2367445, upload-time = "2025-10-14T10:19:51.269Z" }, + { url = "https://files.pythonhosted.org/packages/d3/81/8cece29a6ef1b3a92f956ea6da6250d5b2d2e7e4d513dd3b4f0c7a83dfea/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfe3aa529c8f501babf6e502936b9e8d4698502b2cfab41e17a028d91b1ac7b", size = 2072875, upload-time = "2025-10-14T10:19:52.671Z" }, + { url = "https://files.pythonhosted.org/packages/e3/37/a6a579f5fc2cd4d5521284a0ab6a426cc6463a7b3897aeb95b12f1ba607b/pydantic_core-2.41.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca2322da745bf2eeb581fc9ea3bbb31147702163ccbcbf12a3bb630e4bf05e1d", size = 2191329, upload-time = "2025-10-14T10:19:54.214Z" }, + { url = "https://files.pythonhosted.org/packages/ae/03/505020dc5c54ec75ecba9f41119fd1e48f9e41e4629942494c4a8734ded1/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:e8cd3577c796be7231dcf80badcf2e0835a46665eaafd8ace124d886bab4d700", size = 2151658, upload-time = "2025-10-14T10:19:55.843Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5d/2c0d09fb53aa03bbd2a214d89ebfa6304be7df9ed86ee3dc7770257f41ee/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:1cae8851e174c83633f0833e90636832857297900133705ee158cf79d40f03e6", size = 2316777, upload-time = "2025-10-14T10:19:57.607Z" }, + { url = "https://files.pythonhosted.org/packages/ea/4b/c2c9c8f5e1f9c864b57d08539d9d3db160e00491c9f5ee90e1bfd905e644/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a26d950449aae348afe1ac8be5525a00ae4235309b729ad4d3399623125b43c9", size = 2320705, upload-time = "2025-10-14T10:19:59.016Z" }, + { url = "https://files.pythonhosted.org/packages/28/c3/a74c1c37f49c0a02c89c7340fafc0ba816b29bd495d1a31ce1bdeacc6085/pydantic_core-2.41.4-cp310-cp310-win32.whl", hash = "sha256:0cf2a1f599efe57fa0051312774280ee0f650e11152325e41dfd3018ef2c1b57", size = 1975464, upload-time = "2025-10-14T10:20:00.581Z" }, + { url = "https://files.pythonhosted.org/packages/d6/23/5dd5c1324ba80303368f7569e2e2e1a721c7d9eb16acb7eb7b7f85cb1be2/pydantic_core-2.41.4-cp310-cp310-win_amd64.whl", hash = "sha256:a8c2e340d7e454dc3340d3d2e8f23558ebe78c98aa8f68851b04dcb7bc37abdc", size = 2024497, upload-time = "2025-10-14T10:20:03.018Z" }, + { url = "https://files.pythonhosted.org/packages/62/4c/f6cbfa1e8efacd00b846764e8484fe173d25b8dab881e277a619177f3384/pydantic_core-2.41.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:28ff11666443a1a8cf2a044d6a545ebffa8382b5f7973f22c36109205e65dc80", size = 2109062, upload-time = "2025-10-14T10:20:04.486Z" }, + { url = "https://files.pythonhosted.org/packages/21/f8/40b72d3868896bfcd410e1bd7e516e762d326201c48e5b4a06446f6cf9e8/pydantic_core-2.41.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61760c3925d4633290292bad462e0f737b840508b4f722247d8729684f6539ae", size = 1916301, upload-time = 
"2025-10-14T10:20:06.857Z" }, + { url = "https://files.pythonhosted.org/packages/94/4d/d203dce8bee7faeca791671c88519969d98d3b4e8f225da5b96dad226fc8/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae547b7315d055b0de2ec3965643b0ab82ad0106a7ffd29615ee9f266a02827", size = 1968728, upload-time = "2025-10-14T10:20:08.353Z" }, + { url = "https://files.pythonhosted.org/packages/65/f5/6a66187775df87c24d526985b3a5d78d861580ca466fbd9d4d0e792fcf6c/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef9ee5471edd58d1fcce1c80ffc8783a650e3e3a193fe90d52e43bb4d87bff1f", size = 2050238, upload-time = "2025-10-14T10:20:09.766Z" }, + { url = "https://files.pythonhosted.org/packages/5e/b9/78336345de97298cf53236b2f271912ce11f32c1e59de25a374ce12f9cce/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15dd504af121caaf2c95cb90c0ebf71603c53de98305621b94da0f967e572def", size = 2249424, upload-time = "2025-10-14T10:20:11.732Z" }, + { url = "https://files.pythonhosted.org/packages/99/bb/a4584888b70ee594c3d374a71af5075a68654d6c780369df269118af7402/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a926768ea49a8af4d36abd6a8968b8790f7f76dd7cbd5a4c180db2b4ac9a3a2", size = 2366047, upload-time = "2025-10-14T10:20:13.647Z" }, + { url = "https://files.pythonhosted.org/packages/5f/8d/17fc5de9d6418e4d2ae8c675f905cdafdc59d3bf3bf9c946b7ab796a992a/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6916b9b7d134bff5440098a4deb80e4cb623e68974a87883299de9124126c2a8", size = 2071163, upload-time = "2025-10-14T10:20:15.307Z" }, + { url = "https://files.pythonhosted.org/packages/54/e7/03d2c5c0b8ed37a4617430db68ec5e7dbba66358b629cd69e11b4d564367/pydantic_core-2.41.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:5cf90535979089df02e6f17ffd076f07237efa55b7343d98760bde8743c4b265", size = 2190585, upload-time = "2025-10-14T10:20:17.3Z" }, + { url = "https://files.pythonhosted.org/packages/be/fc/15d1c9fe5ad9266a5897d9b932b7f53d7e5cfc800573917a2c5d6eea56ec/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7533c76fa647fade2d7ec75ac5cc079ab3f34879626dae5689b27790a6cf5a5c", size = 2150109, upload-time = "2025-10-14T10:20:19.143Z" }, + { url = "https://files.pythonhosted.org/packages/26/ef/e735dd008808226c83ba56972566138665b71477ad580fa5a21f0851df48/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:37e516bca9264cbf29612539801ca3cd5d1be465f940417b002905e6ed79d38a", size = 2315078, upload-time = "2025-10-14T10:20:20.742Z" }, + { url = "https://files.pythonhosted.org/packages/90/00/806efdcf35ff2ac0f938362350cd9827b8afb116cc814b6b75cf23738c7c/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0c19cb355224037c83642429b8ce261ae108e1c5fbf5c028bac63c77b0f8646e", size = 2318737, upload-time = "2025-10-14T10:20:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/41/7e/6ac90673fe6cb36621a2283552897838c020db343fa86e513d3f563b196f/pydantic_core-2.41.4-cp311-cp311-win32.whl", hash = "sha256:09c2a60e55b357284b5f31f5ab275ba9f7f70b7525e18a132ec1f9160b4f1f03", size = 1974160, upload-time = "2025-10-14T10:20:23.817Z" }, + { url = "https://files.pythonhosted.org/packages/e0/9d/7c5e24ee585c1f8b6356e1d11d40ab807ffde44d2db3b7dfd6d20b09720e/pydantic_core-2.41.4-cp311-cp311-win_amd64.whl", hash = "sha256:711156b6afb5cb1cb7c14a2cc2c4a8b4c717b69046f13c6b332d8a0a8f41ca3e", size = 2021883, upload-time = "2025-10-14T10:20:25.48Z" }, + { url = "https://files.pythonhosted.org/packages/33/90/5c172357460fc28b2871eb4a0fb3843b136b429c6fa827e4b588877bf115/pydantic_core-2.41.4-cp311-cp311-win_arm64.whl", hash = "sha256:6cb9cf7e761f4f8a8589a45e49ed3c0d92d1d696a45a6feaee8c904b26efc2db", size = 1968026, upload-time = 
"2025-10-14T10:20:27.039Z" }, + { url = "https://files.pythonhosted.org/packages/e9/81/d3b3e95929c4369d30b2a66a91db63c8ed0a98381ae55a45da2cd1cc1288/pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887", size = 2099043, upload-time = "2025-10-14T10:20:28.561Z" }, + { url = "https://files.pythonhosted.org/packages/58/da/46fdac49e6717e3a94fc9201403e08d9d61aa7a770fab6190b8740749047/pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2", size = 1910699, upload-time = "2025-10-14T10:20:30.217Z" }, + { url = "https://files.pythonhosted.org/packages/1e/63/4d948f1b9dd8e991a5a98b77dd66c74641f5f2e5225fee37994b2e07d391/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999", size = 1952121, upload-time = "2025-10-14T10:20:32.246Z" }, + { url = "https://files.pythonhosted.org/packages/b2/a7/e5fc60a6f781fc634ecaa9ecc3c20171d238794cef69ae0af79ac11b89d7/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4", size = 2041590, upload-time = "2025-10-14T10:20:34.332Z" }, + { url = "https://files.pythonhosted.org/packages/70/69/dce747b1d21d59e85af433428978a1893c6f8a7068fa2bb4a927fba7a5ff/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f", size = 2219869, upload-time = "2025-10-14T10:20:35.965Z" }, + { url = "https://files.pythonhosted.org/packages/83/6a/c070e30e295403bf29c4df1cb781317b6a9bac7cd07b8d3acc94d501a63c/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b", size = 2345169, 
upload-time = "2025-10-14T10:20:37.627Z" }, + { url = "https://files.pythonhosted.org/packages/f0/83/06d001f8043c336baea7fd202a9ac7ad71f87e1c55d8112c50b745c40324/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47", size = 2070165, upload-time = "2025-10-14T10:20:39.246Z" }, + { url = "https://files.pythonhosted.org/packages/14/0a/e567c2883588dd12bcbc110232d892cf385356f7c8a9910311ac997ab715/pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970", size = 2189067, upload-time = "2025-10-14T10:20:41.015Z" }, + { url = "https://files.pythonhosted.org/packages/f4/1d/3d9fca34273ba03c9b1c5289f7618bc4bd09c3ad2289b5420481aa051a99/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed", size = 2132997, upload-time = "2025-10-14T10:20:43.106Z" }, + { url = "https://files.pythonhosted.org/packages/52/70/d702ef7a6cd41a8afc61f3554922b3ed8d19dd54c3bd4bdbfe332e610827/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8", size = 2307187, upload-time = "2025-10-14T10:20:44.849Z" }, + { url = "https://files.pythonhosted.org/packages/68/4c/c06be6e27545d08b802127914156f38d10ca287a9e8489342793de8aae3c/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431", size = 2305204, upload-time = "2025-10-14T10:20:46.781Z" }, + { url = "https://files.pythonhosted.org/packages/b0/e5/35ae4919bcd9f18603419e23c5eaf32750224a89d41a8df1a3704b69f77e/pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = "sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd", size = 1972536, upload-time = "2025-10-14T10:20:48.39Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/c2/49c5bb6d2a49eb2ee3647a93e3dae7080c6409a8a7558b075027644e879c/pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff", size = 2031132, upload-time = "2025-10-14T10:20:50.421Z" }, + { url = "https://files.pythonhosted.org/packages/06/23/936343dbcba6eec93f73e95eb346810fc732f71ba27967b287b66f7b7097/pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8", size = 1969483, upload-time = "2025-10-14T10:20:52.35Z" }, + { url = "https://files.pythonhosted.org/packages/13/d0/c20adabd181a029a970738dfe23710b52a31f1258f591874fcdec7359845/pydantic_core-2.41.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:85e050ad9e5f6fe1004eec65c914332e52f429bc0ae12d6fa2092407a462c746", size = 2105688, upload-time = "2025-10-14T10:20:54.448Z" }, + { url = "https://files.pythonhosted.org/packages/00/b6/0ce5c03cec5ae94cca220dfecddc453c077d71363b98a4bbdb3c0b22c783/pydantic_core-2.41.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7393f1d64792763a48924ba31d1e44c2cfbc05e3b1c2c9abb4ceeadd912cced", size = 1910807, upload-time = "2025-10-14T10:20:56.115Z" }, + { url = "https://files.pythonhosted.org/packages/68/3e/800d3d02c8beb0b5c069c870cbb83799d085debf43499c897bb4b4aaff0d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94dab0940b0d1fb28bcab847adf887c66a27a40291eedf0b473be58761c9799a", size = 1956669, upload-time = "2025-10-14T10:20:57.874Z" }, + { url = "https://files.pythonhosted.org/packages/60/a4/24271cc71a17f64589be49ab8bd0751f6a0a03046c690df60989f2f95c2c/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de7c42f897e689ee6f9e93c4bec72b99ae3b32a2ade1c7e4798e690ff5246e02", size = 2051629, upload-time = "2025-10-14T10:21:00.006Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/de/45af3ca2f175d91b96bfb62e1f2d2f1f9f3b14a734afe0bfeff079f78181/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:664b3199193262277b8b3cd1e754fb07f2c6023289c815a1e1e8fb415cb247b1", size = 2224049, upload-time = "2025-10-14T10:21:01.801Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/ae4e1ff84672bf869d0a77af24fd78387850e9497753c432875066b5d622/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95b253b88f7d308b1c0b417c4624f44553ba4762816f94e6986819b9c273fb2", size = 2342409, upload-time = "2025-10-14T10:21:03.556Z" }, + { url = "https://files.pythonhosted.org/packages/18/62/273dd70b0026a085c7b74b000394e1ef95719ea579c76ea2f0cc8893736d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84", size = 2069635, upload-time = "2025-10-14T10:21:05.385Z" }, + { url = "https://files.pythonhosted.org/packages/30/03/cf485fff699b4cdaea469bc481719d3e49f023241b4abb656f8d422189fc/pydantic_core-2.41.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1affa4798520b148d7182da0615d648e752de4ab1a9566b7471bc803d88a062d", size = 2194284, upload-time = "2025-10-14T10:21:07.122Z" }, + { url = "https://files.pythonhosted.org/packages/f9/7e/c8e713db32405dfd97211f2fc0a15d6bf8adb7640f3d18544c1f39526619/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b74e18052fea4aa8dea2fb7dbc23d15439695da6cbe6cfc1b694af1115df09d", size = 2137566, upload-time = "2025-10-14T10:21:08.981Z" }, + { url = "https://files.pythonhosted.org/packages/04/f7/db71fd4cdccc8b75990f79ccafbbd66757e19f6d5ee724a6252414483fb4/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:285b643d75c0e30abda9dc1077395624f314a37e3c09ca402d4015ef5979f1a2", size = 2316809, upload-time = "2025-10-14T10:21:10.805Z" }, + { url 
= "https://files.pythonhosted.org/packages/76/63/a54973ddb945f1bca56742b48b144d85c9fc22f819ddeb9f861c249d5464/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f52679ff4218d713b3b33f88c89ccbf3a5c2c12ba665fb80ccc4192b4608dbab", size = 2311119, upload-time = "2025-10-14T10:21:12.583Z" }, + { url = "https://files.pythonhosted.org/packages/f8/03/5d12891e93c19218af74843a27e32b94922195ded2386f7b55382f904d2f/pydantic_core-2.41.4-cp313-cp313-win32.whl", hash = "sha256:ecde6dedd6fff127c273c76821bb754d793be1024bc33314a120f83a3c69460c", size = 1981398, upload-time = "2025-10-14T10:21:14.584Z" }, + { url = "https://files.pythonhosted.org/packages/be/d8/fd0de71f39db91135b7a26996160de71c073d8635edfce8b3c3681be0d6d/pydantic_core-2.41.4-cp313-cp313-win_amd64.whl", hash = "sha256:d081a1f3800f05409ed868ebb2d74ac39dd0c1ff6c035b5162356d76030736d4", size = 2030735, upload-time = "2025-10-14T10:21:16.432Z" }, + { url = "https://files.pythonhosted.org/packages/72/86/c99921c1cf6650023c08bfab6fe2d7057a5142628ef7ccfa9921f2dda1d5/pydantic_core-2.41.4-cp313-cp313-win_arm64.whl", hash = "sha256:f8e49c9c364a7edcbe2a310f12733aad95b022495ef2a8d653f645e5d20c1564", size = 1973209, upload-time = "2025-10-14T10:21:18.213Z" }, + { url = "https://files.pythonhosted.org/packages/36/0d/b5706cacb70a8414396efdda3d72ae0542e050b591119e458e2490baf035/pydantic_core-2.41.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ed97fd56a561f5eb5706cebe94f1ad7c13b84d98312a05546f2ad036bafe87f4", size = 1877324, upload-time = "2025-10-14T10:21:20.363Z" }, + { url = "https://files.pythonhosted.org/packages/de/2d/cba1fa02cfdea72dfb3a9babb067c83b9dff0bbcb198368e000a6b756ea7/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a870c307bf1ee91fc58a9a61338ff780d01bfae45922624816878dce784095d2", size = 1884515, upload-time = "2025-10-14T10:21:22.339Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/ea/3df927c4384ed9b503c9cc2d076cf983b4f2adb0c754578dfb1245c51e46/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25e97bc1f5f8f7985bdc2335ef9e73843bb561eb1fa6831fdfc295c1c2061cf", size = 2042819, upload-time = "2025-10-14T10:21:26.683Z" }, + { url = "https://files.pythonhosted.org/packages/6a/ee/df8e871f07074250270a3b1b82aad4cd0026b588acd5d7d3eb2fcb1471a3/pydantic_core-2.41.4-cp313-cp313t-win_amd64.whl", hash = "sha256:d405d14bea042f166512add3091c1af40437c2e7f86988f3915fabd27b1e9cd2", size = 1995866, upload-time = "2025-10-14T10:21:28.951Z" }, + { url = "https://files.pythonhosted.org/packages/fc/de/b20f4ab954d6d399499c33ec4fafc46d9551e11dc1858fb7f5dca0748ceb/pydantic_core-2.41.4-cp313-cp313t-win_arm64.whl", hash = "sha256:19f3684868309db5263a11bace3c45d93f6f24afa2ffe75a647583df22a2ff89", size = 1970034, upload-time = "2025-10-14T10:21:30.869Z" }, + { url = "https://files.pythonhosted.org/packages/54/28/d3325da57d413b9819365546eb9a6e8b7cbd9373d9380efd5f74326143e6/pydantic_core-2.41.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:e9205d97ed08a82ebb9a307e92914bb30e18cdf6f6b12ca4bedadb1588a0bfe1", size = 2102022, upload-time = "2025-10-14T10:21:32.809Z" }, + { url = "https://files.pythonhosted.org/packages/9e/24/b58a1bc0d834bf1acc4361e61233ee217169a42efbdc15a60296e13ce438/pydantic_core-2.41.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:82df1f432b37d832709fbcc0e24394bba04a01b6ecf1ee87578145c19cde12ac", size = 1905495, upload-time = "2025-10-14T10:21:34.812Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a4/71f759cc41b7043e8ecdaab81b985a9b6cad7cec077e0b92cff8b71ecf6b/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3b4cc4539e055cfa39a3763c939f9d409eb40e85813257dcd761985a108554", size = 1956131, upload-time = "2025-10-14T10:21:36.924Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/64/1e79ac7aa51f1eec7c4cda8cbe456d5d09f05fdd68b32776d72168d54275/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1eb1754fce47c63d2ff57fdb88c351a6c0150995890088b33767a10218eaa4e", size = 2052236, upload-time = "2025-10-14T10:21:38.927Z" }, + { url = "https://files.pythonhosted.org/packages/e9/e3/a3ffc363bd4287b80f1d43dc1c28ba64831f8dfc237d6fec8f2661138d48/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6ab5ab30ef325b443f379ddb575a34969c333004fca5a1daa0133a6ffaad616", size = 2223573, upload-time = "2025-10-14T10:21:41.574Z" }, + { url = "https://files.pythonhosted.org/packages/28/27/78814089b4d2e684a9088ede3790763c64693c3d1408ddc0a248bc789126/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31a41030b1d9ca497634092b46481b937ff9397a86f9f51bd41c4767b6fc04af", size = 2342467, upload-time = "2025-10-14T10:21:44.018Z" }, + { url = "https://files.pythonhosted.org/packages/92/97/4de0e2a1159cb85ad737e03306717637842c88c7fd6d97973172fb183149/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a44ac1738591472c3d020f61c6df1e4015180d6262ebd39bf2aeb52571b60f12", size = 2063754, upload-time = "2025-10-14T10:21:46.466Z" }, + { url = "https://files.pythonhosted.org/packages/0f/50/8cb90ce4b9efcf7ae78130afeb99fd1c86125ccdf9906ef64b9d42f37c25/pydantic_core-2.41.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d72f2b5e6e82ab8f94ea7d0d42f83c487dc159c5240d8f83beae684472864e2d", size = 2196754, upload-time = "2025-10-14T10:21:48.486Z" }, + { url = "https://files.pythonhosted.org/packages/34/3b/ccdc77af9cd5082723574a1cc1bcae7a6acacc829d7c0a06201f7886a109/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4d1e854aaf044487d31143f541f7aafe7b482ae72a022c664b2de2e466ed0ad", size = 2137115, upload-time = 
"2025-10-14T10:21:50.63Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ba/e7c7a02651a8f7c52dc2cff2b64a30c313e3b57c7d93703cecea76c09b71/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:b568af94267729d76e6ee5ececda4e283d07bbb28e8148bb17adad93d025d25a", size = 2317400, upload-time = "2025-10-14T10:21:52.959Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ba/6c533a4ee8aec6b812c643c49bb3bd88d3f01e3cebe451bb85512d37f00f/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6d55fb8b1e8929b341cc313a81a26e0d48aa3b519c1dbaadec3a6a2b4fcad025", size = 2312070, upload-time = "2025-10-14T10:21:55.419Z" }, + { url = "https://files.pythonhosted.org/packages/22/ae/f10524fcc0ab8d7f96cf9a74c880243576fd3e72bd8ce4f81e43d22bcab7/pydantic_core-2.41.4-cp314-cp314-win32.whl", hash = "sha256:5b66584e549e2e32a1398df11da2e0a7eff45d5c2d9db9d5667c5e6ac764d77e", size = 1982277, upload-time = "2025-10-14T10:21:57.474Z" }, + { url = "https://files.pythonhosted.org/packages/b4/dc/e5aa27aea1ad4638f0c3fb41132f7eb583bd7420ee63204e2d4333a3bbf9/pydantic_core-2.41.4-cp314-cp314-win_amd64.whl", hash = "sha256:557a0aab88664cc552285316809cab897716a372afaf8efdbef756f8b890e894", size = 2024608, upload-time = "2025-10-14T10:21:59.557Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/51d89cc2612bd147198e120a13f150afbf0bcb4615cddb049ab10b81b79e/pydantic_core-2.41.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f1ea6f48a045745d0d9f325989d8abd3f1eaf47dd00485912d1a3a63c623a8d", size = 1967614, upload-time = "2025-10-14T10:22:01.847Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c2/472f2e31b95eff099961fa050c376ab7156a81da194f9edb9f710f68787b/pydantic_core-2.41.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6c1fe4c5404c448b13188dd8bd2ebc2bdd7e6727fa61ff481bcc2cca894018da", size = 1876904, upload-time = "2025-10-14T10:22:04.062Z" }, + { url = 
"https://files.pythonhosted.org/packages/4a/07/ea8eeb91173807ecdae4f4a5f4b150a520085b35454350fc219ba79e66a3/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:523e7da4d43b113bf8e7b49fa4ec0c35bf4fe66b2230bfc5c13cc498f12c6c3e", size = 1882538, upload-time = "2025-10-14T10:22:06.39Z" }, + { url = "https://files.pythonhosted.org/packages/1e/29/b53a9ca6cd366bfc928823679c6a76c7a4c69f8201c0ba7903ad18ebae2f/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa", size = 2041183, upload-time = "2025-10-14T10:22:08.812Z" }, + { url = "https://files.pythonhosted.org/packages/c7/3d/f8c1a371ceebcaf94d6dd2d77c6cf4b1c078e13a5837aee83f760b4f7cfd/pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d", size = 1993542, upload-time = "2025-10-14T10:22:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/8a/ac/9fc61b4f9d079482a290afe8d206b8f490e9fd32d4fc03ed4fc698214e01/pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0", size = 1973897, upload-time = "2025-10-14T10:22:13.444Z" }, + { url = "https://files.pythonhosted.org/packages/2c/36/f86d582be5fb47d4014506cd9ddd10a3979b6d0f2d237aa6ad3e7033b3ea/pydantic_core-2.41.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:646e76293345954acea6966149683047b7b2ace793011922208c8e9da12b0062", size = 2112444, upload-time = "2025-10-14T10:22:16.165Z" }, + { url = "https://files.pythonhosted.org/packages/ba/e5/63c521dc2dd106ba6b5941c080617ea9db252f8a7d5625231e9d761bc28c/pydantic_core-2.41.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cc8e85a63085a137d286e2791037f5fdfff0aabb8b899483ca9c496dd5797338", size = 1938218, upload-time = "2025-10-14T10:22:19.443Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/56/c84b638a3e6e9f5a612b9f5abdad73182520423de43669d639ed4f14b011/pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:692c622c8f859a17c156492783902d8370ac7e121a611bd6fe92cc71acf9ee8d", size = 1971449, upload-time = "2025-10-14T10:22:21.567Z" }, + { url = "https://files.pythonhosted.org/packages/99/c6/e974aade34fc7a0248fdfd0a373d62693502a407c596ab3470165e38183c/pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d1e2906efb1031a532600679b424ef1d95d9f9fb507f813951f23320903adbd7", size = 2054023, upload-time = "2025-10-14T10:22:24.229Z" }, + { url = "https://files.pythonhosted.org/packages/4f/91/2507dda801f50980a38d1353c313e8f51349a42b008e63a4e45bf4620562/pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e04e2f7f8916ad3ddd417a7abdd295276a0bf216993d9318a5d61cc058209166", size = 2251614, upload-time = "2025-10-14T10:22:26.498Z" }, + { url = "https://files.pythonhosted.org/packages/b2/ad/05d886bc96938f4d31bed24e8d3fc3496d9aea7e77bcff6e4b93127c6de7/pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df649916b81822543d1c8e0e1d079235f68acdc7d270c911e8425045a8cfc57e", size = 2378807, upload-time = "2025-10-14T10:22:28.733Z" }, + { url = "https://files.pythonhosted.org/packages/6a/0a/d26e1bb9a80b9fc12cc30d9288193fbc9e60a799e55843804ee37bd38a9c/pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66c529f862fdba70558061bb936fe00ddbaaa0c647fd26e4a4356ef1d6561891", size = 2076891, upload-time = "2025-10-14T10:22:30.853Z" }, + { url = "https://files.pythonhosted.org/packages/d9/66/af014e3a294d9933ebfecf11a5d858709014bd2315fa9616195374dd82f0/pydantic_core-2.41.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fc3b4c5a1fd3a311563ed866c2c9b62da06cb6398bee186484ce95c820db71cb", size = 2192179, upload-time = 
"2025-10-14T10:22:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3e/79783f97024037d0ea6e1b3ebcd761463a925199e04ce2625727e9f27d06/pydantic_core-2.41.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6e0fc40d84448f941df9b3334c4b78fe42f36e3bf631ad54c3047a0cdddc2514", size = 2153067, upload-time = "2025-10-14T10:22:35.792Z" }, + { url = "https://files.pythonhosted.org/packages/b3/97/ea83b0f87d9e742405fb687d5682e7a26334eef2c82a2de06bfbdc305fab/pydantic_core-2.41.4-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:44e7625332683b6c1c8b980461475cde9595eff94447500e80716db89b0da005", size = 2319048, upload-time = "2025-10-14T10:22:38.144Z" }, + { url = "https://files.pythonhosted.org/packages/64/4a/36d8c966a0b086362ac10a7ee75978ed15c5f2dfdfc02a1578d19d3802fb/pydantic_core-2.41.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:170ee6835f6c71081d031ef1c3b4dc4a12b9efa6a9540f93f95b82f3c7571ae8", size = 2321830, upload-time = "2025-10-14T10:22:40.337Z" }, + { url = "https://files.pythonhosted.org/packages/a2/6e/d80cc4909dde5f6842861288aa1a7181e7afbfc50940c862ed2848df15bd/pydantic_core-2.41.4-cp39-cp39-win32.whl", hash = "sha256:3adf61415efa6ce977041ba9745183c0e1f637ca849773afa93833e04b163feb", size = 1976706, upload-time = "2025-10-14T10:22:42.61Z" }, + { url = "https://files.pythonhosted.org/packages/29/ee/5bda8d960d4a8b24a7eeb8a856efa9c865a7a6cab714ed387b29507dc278/pydantic_core-2.41.4-cp39-cp39-win_amd64.whl", hash = "sha256:a238dd3feee263eeaeb7dc44aea4ba1364682c4f9f9467e6af5596ba322c2332", size = 2027640, upload-time = "2025-10-14T10:22:44.907Z" }, + { url = "https://files.pythonhosted.org/packages/b0/12/5ba58daa7f453454464f92b3ca7b9d7c657d8641c48e370c3ebc9a82dd78/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:a1b2cfec3879afb742a7b0bcfa53e4f22ba96571c9e54d6a3afe1052d17d843b", size = 2122139, upload-time = "2025-10-14T10:22:47.288Z" }, + { url = 
"https://files.pythonhosted.org/packages/21/fb/6860126a77725c3108baecd10fd3d75fec25191d6381b6eb2ac660228eac/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:d175600d975b7c244af6eb9c9041f10059f20b8bbffec9e33fdd5ee3f67cdc42", size = 1936674, upload-time = "2025-10-14T10:22:49.555Z" }, + { url = "https://files.pythonhosted.org/packages/de/be/57dcaa3ed595d81f8757e2b44a38240ac5d37628bce25fb20d02c7018776/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f184d657fa4947ae5ec9c47bd7e917730fa1cbb78195037e32dcbab50aca5ee", size = 1956398, upload-time = "2025-10-14T10:22:52.19Z" }, + { url = "https://files.pythonhosted.org/packages/2f/1d/679a344fadb9695f1a6a294d739fbd21d71fa023286daeea8c0ed49e7c2b/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed810568aeffed3edc78910af32af911c835cc39ebbfacd1f0ab5dd53028e5c", size = 2138674, upload-time = "2025-10-14T10:22:54.499Z" }, + { url = "https://files.pythonhosted.org/packages/c4/48/ae937e5a831b7c0dc646b2ef788c27cd003894882415300ed21927c21efa/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537", size = 2112087, upload-time = "2025-10-14T10:22:56.818Z" }, + { url = "https://files.pythonhosted.org/packages/5e/db/6db8073e3d32dae017da7e0d16a9ecb897d0a4d92e00634916e486097961/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94", size = 1920387, upload-time = "2025-10-14T10:22:59.342Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c1/dd3542d072fcc336030d66834872f0328727e3b8de289c662faa04aa270e/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c", size = 1951495, upload-time = "2025-10-14T10:23:02.089Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c6/db8d13a1f8ab3f1eb08c88bd00fd62d44311e3456d1e85c0e59e0a0376e7/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335", size = 2139008, upload-time = "2025-10-14T10:23:04.539Z" }, + { url = "https://files.pythonhosted.org/packages/5d/d4/912e976a2dd0b49f31c98a060ca90b353f3b73ee3ea2fd0030412f6ac5ec/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e5ab4fc177dd41536b3c32b2ea11380dd3d4619a385860621478ac2d25ceb00", size = 2106739, upload-time = "2025-10-14T10:23:06.934Z" }, + { url = "https://files.pythonhosted.org/packages/71/f0/66ec5a626c81eba326072d6ee2b127f8c139543f1bf609b4842978d37833/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d88d0054d3fa11ce936184896bed3c1c5441d6fa483b498fac6a5d0dd6f64a9", size = 1932549, upload-time = "2025-10-14T10:23:09.24Z" }, + { url = "https://files.pythonhosted.org/packages/c4/af/625626278ca801ea0a658c2dcf290dc9f21bb383098e99e7c6a029fccfc0/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2a054a8725f05b4b6503357e0ac1c4e8234ad3b0c2ac130d6ffc66f0e170e2", size = 2135093, upload-time = "2025-10-14T10:23:11.626Z" }, + { url = "https://files.pythonhosted.org/packages/20/f6/2fba049f54e0f4975fef66be654c597a1d005320fa141863699180c7697d/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0d9db5a161c99375a0c68c058e227bee1d89303300802601d76a3d01f74e258", size = 2187971, upload-time = "2025-10-14T10:23:14.437Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/80/65ab839a2dfcd3b949202f9d920c34f9de5a537c3646662bdf2f7d999680/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6273ea2c8ffdac7b7fda2653c49682db815aebf4a89243a6feccf5e36c18c347", size = 2147939, upload-time = "2025-10-14T10:23:16.831Z" }, + { url = "https://files.pythonhosted.org/packages/44/58/627565d3d182ce6dfda18b8e1c841eede3629d59c9d7cbc1e12a03aeb328/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:4c973add636efc61de22530b2ef83a65f39b6d6f656df97f678720e20de26caa", size = 2311400, upload-time = "2025-10-14T10:23:19.234Z" }, + { url = "https://files.pythonhosted.org/packages/24/06/8a84711162ad5a5f19a88cead37cca81b4b1f294f46260ef7334ae4f24d3/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b69d1973354758007f46cf2d44a4f3d0933f10b6dc9bf15cf1356e037f6f731a", size = 2316840, upload-time = "2025-10-14T10:23:21.738Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8b/b7bb512a4682a2f7fbfae152a755d37351743900226d29bd953aaf870eaa/pydantic_core-2.41.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3619320641fd212aaf5997b6ca505e97540b7e16418f4a241f44cdf108ffb50d", size = 2149135, upload-time = "2025-10-14T10:23:24.379Z" }, + { url = "https://files.pythonhosted.org/packages/7e/7d/138e902ed6399b866f7cfe4435d22445e16fff888a1c00560d9dc79a780f/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:491535d45cd7ad7e4a2af4a5169b0d07bebf1adfd164b0368da8aa41e19907a5", size = 2104721, upload-time = "2025-10-14T10:23:26.906Z" }, + { url = "https://files.pythonhosted.org/packages/47/13/0525623cf94627f7b53b4c2034c81edc8491cbfc7c28d5447fa318791479/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:54d86c0cada6aba4ec4c047d0e348cbad7063b87ae0f005d9f8c9ad04d4a92a2", size = 1931608, upload-time = "2025-10-14T10:23:29.306Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/f9/744bc98137d6ef0a233f808bfc9b18cf94624bf30836a18d3b05d08bf418/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca1124aced216b2500dc2609eade086d718e8249cb9696660ab447d50a758bd", size = 2132986, upload-time = "2025-10-14T10:23:32.057Z" }, + { url = "https://files.pythonhosted.org/packages/17/c8/629e88920171173f6049386cc71f893dff03209a9ef32b4d2f7e7c264bcf/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c9024169becccf0cb470ada03ee578d7348c119a0d42af3dcf9eda96e3a247c", size = 2187516, upload-time = "2025-10-14T10:23:34.871Z" }, + { url = "https://files.pythonhosted.org/packages/2e/0f/4f2734688d98488782218ca61bcc118329bf5de05bb7fe3adc7dd79b0b86/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:26895a4268ae5a2849269f4991cdc97236e4b9c010e51137becf25182daac405", size = 2146146, upload-time = "2025-10-14T10:23:37.342Z" }, + { url = "https://files.pythonhosted.org/packages/ed/f2/ab385dbd94a052c62224b99cf99002eee99dbec40e10006c78575aead256/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:ca4df25762cf71308c446e33c9b1fdca2923a3f13de616e2a949f38bf21ff5a8", size = 2311296, upload-time = "2025-10-14T10:23:40.145Z" }, + { url = "https://files.pythonhosted.org/packages/fc/8e/e4f12afe1beeb9823bba5375f8f258df0cc61b056b0195fb1cf9f62a1a58/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5a28fcedd762349519276c36634e71853b4541079cab4acaaac60c4421827308", size = 2315386, upload-time = "2025-10-14T10:23:42.624Z" }, + { url = "https://files.pythonhosted.org/packages/48/f7/925f65d930802e3ea2eb4d5afa4cb8730c8dc0d2cb89a59dc4ed2fcb2d74/pydantic_core-2.41.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c173ddcd86afd2535e2b695217e82191580663a1d1928239f877f5a1649ef39f", size = 2147775, upload-time = "2025-10-14T10:23:45.406Z" }, +] + +[[package]] 
+name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pyjwt" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, +] + +[[package]] +name = "pyproject-hooks" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/82/28175b2414effca1cdac8dc99f76d660e7a4fb0ceefa4b4ab8f5f6742925/pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8", size = 19228, upload-time = "2024-09-29T09:24:13.293Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/24/12818598c362d7f300f18e74db45963dbcb85150324092410c8b49405e42/pyproject_hooks-1.2.0-py3-none-any.whl", hash = 
"sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913", size = 10216, upload-time = "2024-09-29T09:24:11.978Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig", version = "2.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "iniconfig", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backports-asyncio-runner", marker = "python_full_version < '3.11'" }, + { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/86/9e3c5f48f7b7b638b216e4b9e645f54d199d7abbbab7a64a13b4e12ba10f/pytest_asyncio-1.2.0.tar.gz", hash = "sha256:c609a64a2a8768462d0c99811ddb8bd2583c33fd33cf7f21af1c142e824ffb57", size = 50119, upload-time = 
"2025-09-12T07:33:53.816Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/93/2fa34714b7a4ae72f2f8dad66ba17dd9a2c793220719e736dda28b7aec27/pytest_asyncio-1.2.0-py3-none-any.whl", hash = "sha256:8e17ae5e46d8e7efe51ab6494dd2010f4ca8dae51652aa3c8d55acf50bfb2e99", size = 15095, upload-time = "2025-09-12T07:33:52.639Z" }, +] + +[[package]] +name = "pytest-cov" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", version = "7.10.7", source = { registry = "https://pypi.org/simple" }, extra = ["toml"], marker = "python_full_version < '3.10'" }, + { name = "coverage", version = "7.11.0", source = { registry = "https://pypi.org/simple" }, extra = ["toml"], marker = "python_full_version >= '3.10'" }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, +] + +[[package]] +name = "pywin32-ctypes" +version = "0.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/85/9f/01a1a99704853cb63f253eea009390c88e7131c67e66a0a02099a8c917cb/pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755", size = 29471, upload-time = "2024-08-14T10:15:34.626Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/3d/8161f7711c017e01ac9f008dfddd9410dff3674334c233bde66e7ba65bbf/pywin32_ctypes-0.2.3-py3-none-any.whl", 
hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8", size = 30756, upload-time = "2024-08-14T10:15:33.187Z" }, +] + +[[package]] +name = "readme-renderer" +version = "44.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "nh3" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5a/a9/104ec9234c8448c4379768221ea6df01260cd6c2ce13182d4eac531c8342/readme_renderer-44.0.tar.gz", hash = "sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1", size = 32056, upload-time = "2024-07-08T15:00:57.805Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/67/921ec3024056483db83953ae8e48079ad62b92db7880013ca77632921dd0/readme_renderer-44.0-py3-none-any.whl", hash = "sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151", size = 13310, upload-time = "2024-07-08T15:00:56.577Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/f3/61/d7545dafb7ac2230c70d38d31cbfe4cc64f7144dc41f6e4e4b78ecd9f5bb/requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6", size = 206888, upload-time = "2023-05-01T04:11:33.229Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06", size = 54481, upload-time = "2023-05-01T04:11:28.427Z" }, +] + +[[package]] +name = "rfc3986" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/85/40/1520d68bfa07ab5a6f065a186815fb6610c86fe957bc065754e47f7b0840/rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c", size = 49026, upload-time = "2022-01-10T00:52:30.832Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/9a/9afaade874b2fa6c752c36f1548f718b5b83af81ed9b76628329dab81c1b/rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd", size = 31326, upload-time = "2022-01-10T00:52:29.594Z" }, +] + +[[package]] +name = "rich" +version = "14.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py", version = "3.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "markdown-it-py", version = "4.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = 
"2025-10-09T14:16:53.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, +] + +[[package]] +name = "ruff" +version = "0.14.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/34/8218a19b2055b80601e8fd201ec723c74c7fe1ca06d525a43ed07b6d8e85/ruff-0.14.2.tar.gz", hash = "sha256:98da787668f239313d9c902ca7c523fe11b8ec3f39345553a51b25abc4629c96", size = 5539663, upload-time = "2025-10-23T19:37:00.956Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/dd/23eb2db5ad9acae7c845700493b72d3ae214dce0b226f27df89216110f2b/ruff-0.14.2-py3-none-linux_armv6l.whl", hash = "sha256:7cbe4e593505bdec5884c2d0a4d791a90301bc23e49a6b1eb642dd85ef9c64f1", size = 12533390, upload-time = "2025-10-23T19:36:18.044Z" }, + { url = "https://files.pythonhosted.org/packages/5a/8c/5f9acff43ddcf3f85130d0146d0477e28ccecc495f9f684f8f7119b74c0d/ruff-0.14.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:8d54b561729cee92f8d89c316ad7a3f9705533f5903b042399b6ae0ddfc62e11", size = 12887187, upload-time = "2025-10-23T19:36:22.664Z" }, + { url = "https://files.pythonhosted.org/packages/99/fa/047646491479074029665022e9f3dc6f0515797f40a4b6014ea8474c539d/ruff-0.14.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5c8753dfa44ebb2cde10ce5b4d2ef55a41fb9d9b16732a2c5df64620dbda44a3", size = 11925177, upload-time = "2025-10-23T19:36:24.778Z" }, + { url = "https://files.pythonhosted.org/packages/15/8b/c44cf7fe6e59ab24a9d939493a11030b503bdc2a16622cede8b7b1df0114/ruff-0.14.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d0bbeffb8d9f4fccf7b5198d566d0bad99a9cb622f1fc3467af96cb8773c9e3", size = 12358285, upload-time = "2025-10-23T19:36:26.979Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/01/47701b26254267ef40369aea3acb62a7b23e921c27372d127e0f3af48092/ruff-0.14.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7047f0c5a713a401e43a88d36843d9c83a19c584e63d664474675620aaa634a8", size = 12303832, upload-time = "2025-10-23T19:36:29.192Z" }, + { url = "https://files.pythonhosted.org/packages/2d/5c/ae7244ca4fbdf2bee9d6405dcd5bc6ae51ee1df66eb7a9884b77b8af856d/ruff-0.14.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bf8d2f9aa1602599217d82e8e0af7fd33e5878c4d98f37906b7c93f46f9a839", size = 13036995, upload-time = "2025-10-23T19:36:31.861Z" }, + { url = "https://files.pythonhosted.org/packages/27/4c/0860a79ce6fd4c709ac01173f76f929d53f59748d0dcdd662519835dae43/ruff-0.14.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1c505b389e19c57a317cf4b42db824e2fca96ffb3d86766c1c9f8b96d32048a7", size = 14512649, upload-time = "2025-10-23T19:36:33.915Z" }, + { url = "https://files.pythonhosted.org/packages/7f/7f/d365de998069720a3abfc250ddd876fc4b81a403a766c74ff9bde15b5378/ruff-0.14.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a307fc45ebd887b3f26b36d9326bb70bf69b01561950cdcc6c0bdf7bb8e0f7cc", size = 14088182, upload-time = "2025-10-23T19:36:36.983Z" }, + { url = "https://files.pythonhosted.org/packages/6c/ea/d8e3e6b209162000a7be1faa41b0a0c16a133010311edc3329753cc6596a/ruff-0.14.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61ae91a32c853172f832c2f40bd05fd69f491db7289fb85a9b941ebdd549781a", size = 13599516, upload-time = "2025-10-23T19:36:39.208Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ea/c7810322086db68989fb20a8d5221dd3b79e49e396b01badca07b433ab45/ruff-0.14.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1967e40286f63ee23c615e8e7e98098dedc7301568bd88991f6e544d8ae096", size = 13272690, upload-time = "2025-10-23T19:36:41.453Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/39/10b05acf8c45786ef501d454e00937e1b97964f846bf28883d1f9619928a/ruff-0.14.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:2877f02119cdebf52a632d743a2e302dea422bfae152ebe2f193d3285a3a65df", size = 13496497, upload-time = "2025-10-23T19:36:43.61Z" }, + { url = "https://files.pythonhosted.org/packages/59/a1/1f25f8301e13751c30895092485fada29076e5e14264bdacc37202e85d24/ruff-0.14.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e681c5bc777de5af898decdcb6ba3321d0d466f4cb43c3e7cc2c3b4e7b843a05", size = 12266116, upload-time = "2025-10-23T19:36:45.625Z" }, + { url = "https://files.pythonhosted.org/packages/5c/fa/0029bfc9ce16ae78164e6923ef392e5f173b793b26cc39aa1d8b366cf9dc/ruff-0.14.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e21be42d72e224736f0c992cdb9959a2fa53c7e943b97ef5d081e13170e3ffc5", size = 12281345, upload-time = "2025-10-23T19:36:47.618Z" }, + { url = "https://files.pythonhosted.org/packages/a5/ab/ece7baa3c0f29b7683be868c024f0838770c16607bea6852e46b202f1ff6/ruff-0.14.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b8264016f6f209fac16262882dbebf3f8be1629777cf0f37e7aff071b3e9b92e", size = 12629296, upload-time = "2025-10-23T19:36:49.789Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7f/638f54b43f3d4e48c6a68062794e5b367ddac778051806b9e235dfb7aa81/ruff-0.14.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5ca36b4cb4db3067a3b24444463ceea5565ea78b95fe9a07ca7cb7fd16948770", size = 13371610, upload-time = "2025-10-23T19:36:51.882Z" }, + { url = "https://files.pythonhosted.org/packages/8d/35/3654a973ebe5b32e1fd4a08ed2d46755af7267da7ac710d97420d7b8657d/ruff-0.14.2-py3-none-win32.whl", hash = "sha256:41775927d287685e08f48d8eb3f765625ab0b7042cc9377e20e64f4eb0056ee9", size = 12415318, upload-time = "2025-10-23T19:36:53.961Z" }, + { url = "https://files.pythonhosted.org/packages/71/30/3758bcf9e0b6a4193a6f51abf84254aba00887dfa8c20aba18aa366c5f57/ruff-0.14.2-py3-none-win_amd64.whl", hash = 
"sha256:0df3424aa5c3c08b34ed8ce099df1021e3adaca6e90229273496b839e5a7e1af", size = 13565279, upload-time = "2025-10-23T19:36:56.578Z" }, + { url = "https://files.pythonhosted.org/packages/2e/5d/aa883766f8ef9ffbe6aa24f7192fb71632f31a30e77eb39aa2b0dc4290ac/ruff-0.14.2-py3-none-win_arm64.whl", hash = "sha256:ea9d635e83ba21569fbacda7e78afbfeb94911c9434aff06192d9bc23fd5495a", size = 12554956, upload-time = "2025-10-23T19:36:58.714Z" }, +] + +[[package]] +name = "secretstorage" +version = "3.3.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "cryptography", marker = "python_full_version < '3.10'" }, + { name = "jeepney", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/53/a4/f48c9d79cb507ed1373477dbceaba7401fd8a23af63b837fa61f1dcd3691/SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77", size = 19739, upload-time = "2022-08-13T16:22:46.976Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/24/b4293291fa1dd830f353d2cb163295742fa87f179fcc8a20a306a81978b7/SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99", size = 15221, upload-time = "2022-08-13T16:22:44.457Z" }, +] + +[[package]] +name = "secretstorage" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +dependencies = [ + { name = "cryptography", marker = "python_full_version >= '3.10'" }, + { name = "jeepney", marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/31/9f/11ef35cf1027c1339552ea7bfe6aaa74a8516d8b5caf6e7d338daf54fd80/secretstorage-3.4.0.tar.gz", hash = "sha256:c46e216d6815aff8a8a18706a2fbfd8d53fcbb0dce99301881687a1b0289ef7c", size = 19748, upload-time = "2025-09-09T16:42:13.859Z" } +wheels = 
[ + { url = "https://files.pythonhosted.org/packages/91/ff/2e2eed29e02c14a5cb6c57f09b2d5b40e65d6cc71f45b52e0be295ccbc2f/secretstorage-3.4.0-py3-none-any.whl", hash = "sha256:0e3b6265c2c63509fb7415717607e4b2c9ab767b7f344a57473b779ca13bd02e", size = 15272, upload-time = "2025-09-09T16:42:12.744Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "tomli" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, + { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = 
"2025-10-08T22:01:01.63Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, + { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, + { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, + { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, + { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, + { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, + { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, + { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, + { url = 
"https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, + { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, + { url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" }, + { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" }, + { url = "https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" }, + { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" }, + { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" }, + { url = "https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" }, + { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" }, + { url = "https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" }, + { url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" }, + { url = 
"https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" }, + { url = "https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" }, + { url = "https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" }, + { url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" }, + { url = "https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, upload-time = "2025-10-08T22:01:35.082Z" }, + { url = "https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" }, + { url = "https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" }, + { url = "https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" }, + { url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" }, + { url = "https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" }, + { url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" }, + { url = "https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" }, + { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, +] + +[[package]] +name = "twine" +version = "6.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "id" }, + { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, + { name = "keyring", marker = "platform_machine != 'ppc64le' and platform_machine != 's390x'" }, + { name = "packaging" }, + { name = "readme-renderer" }, + { name = "requests" }, + { name = "requests-toolbelt" }, + { name = "rfc3986" }, + { name = "rich" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e0/a8/949edebe3a82774c1ec34f637f5dd82d1cf22c25e963b7d63771083bbee5/twine-6.2.0.tar.gz", hash = "sha256:e5ed0d2fd70c9959770dce51c8f39c8945c574e18173a7b81802dab51b4b75cf", size = 172262, upload-time = "2025-09-04T15:43:17.255Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/7a/882d99539b19b1490cac5d77c67338d126e4122c8276bf640e411650c830/twine-6.2.0-py3-none-any.whl", hash = "sha256:418ebf08ccda9a8caaebe414433b0ba5e25eb5e4a927667122fbe8f829f985d8", size = 42727, upload-time = "2025-09-04T15:43:15.994Z" }, +] + +[[package]] +name = 
"typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +] From abe7bec759c580996acc2718f6c43c09552ae074 Mon Sep 17 00:00:00 2001 From: fat Date: Thu, 29 Jan 2026 20:34:32 -0800 Subject: [PATCH 2/4] tests --- packages/git-storage-sdk-go/client.go | 8 +- packages/git-storage-sdk-go/client_test.go | 285 ++ packages/git-storage-sdk-go/commit.go | 42 +- packages/git-storage-sdk-go/commit_pack.go | 2 +- .../commit_pack_extended_test.go | 615 +++ packages/git-storage-sdk-go/diff_commit.go | 35 +- .../diff_commit_extended_test.go | 174 + packages/git-storage-sdk-go/fetch.go | 2 +- packages/git-storage-sdk-go/helpers_test.go | 34 + packages/git-storage-sdk-go/moon.yml | 2 +- packages/git-storage-sdk-go/repo.go | 23 +- packages/git-storage-sdk-go/repo_test.go | 444 ++- packages/git-storage-sdk-go/util.go | 9 +- packages/git-storage-sdk-go/version_test.go | 29 + packages/git-storage-sdk-go/webhook.go | 22 +- packages/git-storage-sdk-go/webhook_test.go | 204 + packages/git-storage-sdk-node/AGENTS.md | 75 +- packages/git-storage-sdk-node/README.md | 99 +- 
packages/git-storage-sdk-node/package.json | 76 +- .../git-storage-sdk-node/src/commit-pack.ts | 202 +- packages/git-storage-sdk-node/src/commit.ts | 738 ++-- .../git-storage-sdk-node/src/diff-commit.ts | 531 +-- packages/git-storage-sdk-node/src/errors.ts | 68 +- packages/git-storage-sdk-node/src/fetch.ts | 287 +- packages/git-storage-sdk-node/src/index.ts | 2616 +++++++------ packages/git-storage-sdk-node/src/schemas.ts | 234 +- .../git-storage-sdk-node/src/stream-utils.ts | 433 ++- packages/git-storage-sdk-node/src/types.ts | 720 ++-- packages/git-storage-sdk-node/src/util.ts | 75 +- packages/git-storage-sdk-node/src/version.ts | 2 +- packages/git-storage-sdk-node/src/webhook.ts | 483 +-- .../tests/commit-from-diff.test.ts | 641 +-- .../git-storage-sdk-node/tests/commit.test.ts | 1640 ++++---- .../tests/full-workflow.js | 1836 ++++----- .../git-storage-sdk-node/tests/index.test.ts | 3437 +++++++++-------- .../tests/version.test.ts | 101 +- .../tests/webhook.test.ts | 790 ++-- packages/git-storage-sdk-node/tsconfig.json | 14 +- .../git-storage-sdk-node/tsconfig.tsup.json | 22 +- packages/git-storage-sdk-node/tsup.config.ts | 34 +- .../git-storage-sdk-node/vitest.config.ts | 14 +- .../git-storage-sdk-python/DEVELOPMENT.md | 16 +- .../git-storage-sdk-python/PROJECT_SUMMARY.md | 32 +- packages/git-storage-sdk-python/PUBLISHING.md | 24 +- packages/git-storage-sdk-python/QUICKSTART.md | 7 +- packages/git-storage-sdk-python/README.md | 77 +- packages/git-storage-sdk-python/moon.yml | 8 +- 47 files changed, 9828 insertions(+), 7434 deletions(-) create mode 100644 packages/git-storage-sdk-go/commit_pack_extended_test.go create mode 100644 packages/git-storage-sdk-go/diff_commit_extended_test.go create mode 100644 packages/git-storage-sdk-go/version_test.go create mode 100644 packages/git-storage-sdk-go/webhook_test.go diff --git a/packages/git-storage-sdk-go/client.go b/packages/git-storage-sdk-go/client.go index 4df6f1ef6..dab6a295b 100644 --- 
a/packages/git-storage-sdk-go/client.go +++ b/packages/git-storage-sdk-go/client.go @@ -24,7 +24,7 @@ const ( // NewClient creates a Git storage client. func NewClient(options Options) (*Client, error) { if strings.TrimSpace(options.Name) == "" || strings.TrimSpace(options.Key) == "" { - return nil, errors.New("GitStorage requires a name and key. Please check your configuration and try again.") + return nil, errors.New("git storage requires a name and key") } apiBaseURL := options.APIBaseURL @@ -168,7 +168,7 @@ func (c *Client) CreateRepo(ctx context.Context, options CreateRepoOptions) (*Re } defer resp.Body.Close() if resp.StatusCode == 409 { - return nil, errors.New("Repository already exists") + return nil, errors.New("repository already exists") } if resolvedDefaultBranch == "" { @@ -281,10 +281,10 @@ func (c *Client) DeleteRepo(ctx context.Context, options DeleteRepoOptions) (Del defer resp.Body.Close() if resp.StatusCode == 404 { - return DeleteRepoResult{}, errors.New("Repository not found") + return DeleteRepoResult{}, errors.New("repository not found") } if resp.StatusCode == 409 { - return DeleteRepoResult{}, errors.New("Repository already deleted") + return DeleteRepoResult{}, errors.New("repository already deleted") } var payload struct { diff --git a/packages/git-storage-sdk-go/client_test.go b/packages/git-storage-sdk-go/client_test.go index ea5913cf9..90424af2b 100644 --- a/packages/git-storage-sdk-go/client_test.go +++ b/packages/git-storage-sdk-go/client_test.go @@ -101,6 +101,158 @@ func TestCreateRepoForkBaseRepo(t *testing.T) { } } +func TestCreateRepoGitHubBaseRepoDefaultBranch(t *testing.T) { + var receivedBody map[string]interface{} + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + decoder := json.NewDecoder(r.Body) + _ = decoder.Decode(&receivedBody) + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"repo_id":"repo","url":"https://repo.git"}`)) + })) + defer 
server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + + _, err = client.CreateRepo(nil, CreateRepoOptions{ + BaseRepo: GitHubBaseRepo{ + Owner: "octocat", + Name: "hello-world", + DefaultBranch: "main", + }, + }) + if err != nil { + t.Fatalf("create repo error: %v", err) + } + + baseRepo, ok := receivedBody["base_repo"].(map[string]interface{}) + if !ok { + t.Fatalf("expected base_repo payload") + } + if baseRepo["provider"] != "github" { + t.Fatalf("expected provider github") + } + if baseRepo["default_branch"] != "main" { + t.Fatalf("expected default_branch main") + } + if receivedBody["default_branch"] != "main" { + t.Fatalf("expected default_branch main in request") + } +} + +func TestCreateRepoGitHubBaseRepoCustomDefaultBranch(t *testing.T) { + var receivedBody map[string]interface{} + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + decoder := json.NewDecoder(r.Body) + _ = decoder.Decode(&receivedBody) + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"repo_id":"repo","url":"https://repo.git"}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + + _, err = client.CreateRepo(nil, CreateRepoOptions{ + BaseRepo: GitHubBaseRepo{ + Owner: "octocat", + Name: "hello-world", + }, + DefaultBranch: "develop", + }) + if err != nil { + t.Fatalf("create repo error: %v", err) + } + + if receivedBody["default_branch"] != "develop" { + t.Fatalf("expected default_branch develop in request") + } +} + +func TestCreateRepoForkBaseRepoTokenScopes(t *testing.T) { + var receivedBody map[string]interface{} + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + decoder := json.NewDecoder(r.Body) + _ = decoder.Decode(&receivedBody) + 
w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"repo_id":"repo","url":"https://repo.git"}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + + _, err = client.CreateRepo(nil, CreateRepoOptions{ + BaseRepo: ForkBaseRepo{ID: "template", Ref: "develop"}, + }) + if err != nil { + t.Fatalf("create repo error: %v", err) + } + + baseRepo, ok := receivedBody["base_repo"].(map[string]interface{}) + if !ok { + t.Fatalf("expected base_repo payload") + } + auth, ok := baseRepo["auth"].(map[string]interface{}) + if !ok { + t.Fatalf("expected auth payload") + } + token, _ := auth["token"].(string) + claims := parseJWTFromToken(t, token) + if claims["repo"] != "template" { + t.Fatalf("expected repo claim template") + } + scopes, ok := claims["scopes"].([]interface{}) + if !ok || len(scopes) != 1 || scopes[0] != "git:read" { + t.Fatalf("expected git:read scope") + } +} + +func TestCreateRepoConflict(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusConflict) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + + _, err = client.CreateRepo(nil, CreateRepoOptions{ID: "existing-repo"}) + if err == nil || !strings.Contains(err.Error(), "repository already exists") { + t.Fatalf("expected repository already exists error, got %v", err) + } +} + +func TestListReposCursorLimit(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + q := r.URL.Query() + if q.Get("cursor") != "cursor-1" || q.Get("limit") != "25" { + t.Fatalf("unexpected query: %s", r.URL.RawQuery) + } + w.Header().Set("Content-Type", "application/json") + _, _ = 
w.Write([]byte(`{"repos":[],"has_more":false}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + + _, err = client.ListRepos(nil, ListReposOptions{Cursor: "cursor-1", Limit: 25}) + if err != nil { + t.Fatalf("list repos error: %v", err) + } +} + func TestListReposScopes(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { token := strings.TrimPrefix(r.Header.Get("Authorization"), "Bearer ") @@ -108,6 +260,10 @@ func TestListReposScopes(t *testing.T) { if claims["repo"] != "org" { t.Fatalf("expected repo org") } + scopes, ok := claims["scopes"].([]interface{}) + if !ok || len(scopes) != 1 || scopes[0] != "org:read" { + t.Fatalf("expected org:read scope") + } w.Header().Set("Content-Type", "application/json") _, _ = w.Write([]byte(`{"repos":[],"has_more":false}`)) })) @@ -124,6 +280,50 @@ func TestListReposScopes(t *testing.T) { } } +func TestFindOneReturnsRepo(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/api/v1/repo" { + t.Fatalf("unexpected path: %s", r.URL.Path) + } + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"default_branch":"develop"}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL, StorageBaseURL: "acme.code.storage"}) + if err != nil { + t.Fatalf("client error: %v", err) + } + + repo, err := client.FindOne(nil, FindOneOptions{ID: "repo-1"}) + if err != nil { + t.Fatalf("find one error: %v", err) + } + if repo == nil || repo.DefaultBranch != "develop" { + t.Fatalf("unexpected repo result") + } +} + +func TestFindOneNotFound(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusNotFound) + })) + defer 
server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + + repo, err := client.FindOne(nil, FindOneOptions{ID: "repo-1"}) + if err != nil { + t.Fatalf("find one error: %v", err) + } + if repo != nil { + t.Fatalf("expected nil repo") + } +} + func TestDeleteRepoTTL(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { token := strings.TrimPrefix(r.Header.Get("Authorization"), "Bearer ") @@ -149,6 +349,67 @@ func TestDeleteRepoTTL(t *testing.T) { } } +func TestDeleteRepoNotFound(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusNotFound) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + + _, err = client.DeleteRepo(nil, DeleteRepoOptions{ID: "missing"}) + if err == nil || !strings.Contains(err.Error(), "repository not found") { + t.Fatalf("expected repository not found error, got %v", err) + } +} + +func TestDeleteRepoAlreadyDeleted(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusConflict) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + + _, err = client.DeleteRepo(nil, DeleteRepoOptions{ID: "deleted"}) + if err == nil || !strings.Contains(err.Error(), "repository already deleted") { + t.Fatalf("expected repository already deleted error, got %v", err) + } +} + +func TestDeleteRepoScope(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + token := strings.TrimPrefix(r.Header.Get("Authorization"), "Bearer ") + claims := 
parseJWTFromToken(t, token) + if claims["repo"] != "repo-delete" { + t.Fatalf("expected repo claim") + } + scopes, ok := claims["scopes"].([]interface{}) + if !ok || len(scopes) != 1 || scopes[0] != "repo:write" { + t.Fatalf("expected repo:write scope") + } + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"repo_id":"repo-delete","message":"ok"}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + + _, err = client.DeleteRepo(nil, DeleteRepoOptions{ID: "repo-delete"}) + if err != nil { + t.Fatalf("delete repo error: %v", err) + } +} + func TestConfigAliases(t *testing.T) { client, err := NewCodeStorage(Options{Name: "acme", Key: testKey}) if err != nil { @@ -168,3 +429,27 @@ func TestAliasConstructors(t *testing.T) { t.Fatalf("NewCodeStorage error: %v", err) } } + +func TestCreateRepoUserAgentHeader(t *testing.T) { + var headerAgent string + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + headerAgent = r.Header.Get("Code-Storage-Agent") + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"repo_id":"repo","url":"https://repo.git"}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + + _, err = client.CreateRepo(nil, CreateRepoOptions{ID: "repo"}) + if err != nil { + t.Fatalf("create repo error: %v", err) + } + + if headerAgent == "" || !strings.Contains(headerAgent, "code-storage-go-sdk/") { + t.Fatalf("missing Code-Storage-Agent header") + } +} diff --git a/packages/git-storage-sdk-go/commit.go b/packages/git-storage-sdk-go/commit.go index 85e0ac4da..a1730ab3e 100644 --- a/packages/git-storage-sdk-go/commit.go +++ b/packages/git-storage-sdk-go/commit.go @@ -209,7 +209,7 @@ func (b *CommitBuilder) Send(ctx 
context.Context) (CommitResult, error) { func (b *CommitBuilder) ensureNotSent() error { if b.sent { - return errors.New("createCommit builder cannot be reused after send()") + return errors.New("createCommit builder cannot be reused after send") } return nil } @@ -262,27 +262,26 @@ func buildCommitMetadata(options CommitOptions, ops []commitOperation) *commitMe func writeBlobChunks(encoder *json.Encoder, contentID string, reader io.Reader) error { buf := make([]byte, maxChunkBytes) - emitted := false + var pending []byte for { n, err := reader.Read(buf) if n > 0 { - payload := blobChunkEnvelope{ - BlobChunk: blobChunkPayload{ - ContentID: contentID, - Data: base64.StdEncoding.EncodeToString(buf[:n]), - EOF: err == io.EOF, - }, - } - emitted = true - if err := encoder.Encode(payload); err != nil { - return err - } - if err == io.EOF { - return nil + if pending != nil { + payload := blobChunkEnvelope{ + BlobChunk: blobChunkPayload{ + ContentID: contentID, + Data: base64.StdEncoding.EncodeToString(pending), + EOF: false, + }, + } + if err := encoder.Encode(payload); err != nil { + return err + } } + pending = append(pending[:0], buf[:n]...) 
} if err == io.EOF { - if !emitted { + if pending == nil { payload := blobChunkEnvelope{ BlobChunk: blobChunkPayload{ ContentID: contentID, @@ -292,7 +291,14 @@ func writeBlobChunks(encoder *json.Encoder, contentID string, reader io.Reader) } return encoder.Encode(payload) } - return nil + payload := blobChunkEnvelope{ + BlobChunk: blobChunkPayload{ + ContentID: contentID, + Data: base64.StdEncoding.EncodeToString(pending), + EOF: true, + }, + } + return encoder.Encode(payload) } if err != nil { return err @@ -303,7 +309,7 @@ func writeBlobChunks(encoder *json.Encoder, contentID string, reader io.Reader) func normalizePath(path string) (string, error) { path = strings.TrimSpace(path) if path == "" { - return "", errors.New("File path must be a non-empty string") + return "", errors.New("file path must be a non-empty string") } return strings.TrimPrefix(path, "/"), nil } diff --git a/packages/git-storage-sdk-go/commit_pack.go b/packages/git-storage-sdk-go/commit_pack.go index 6650cfab8..ff52c4a28 100644 --- a/packages/git-storage-sdk-go/commit_pack.go +++ b/packages/git-storage-sdk-go/commit_pack.go @@ -59,7 +59,7 @@ func buildCommitResult(ack commitPackAck) (CommitResult, error) { if !ack.Result.Success { message := ack.Result.Message if strings.TrimSpace(message) == "" { - message = "Commit failed with status " + ack.Result.Status + message = "commit failed with status " + ack.Result.Status } return CommitResult{}, newRefUpdateError(message, ack.Result.Status, &refUpdate) } diff --git a/packages/git-storage-sdk-go/commit_pack_extended_test.go b/packages/git-storage-sdk-go/commit_pack_extended_test.go new file mode 100644 index 000000000..26397d8ad --- /dev/null +++ b/packages/git-storage-sdk-go/commit_pack_extended_test.go @@ -0,0 +1,615 @@ +package storage + +import ( + "bytes" + "encoding/json" + "errors" + "net/http" + "net/http/httptest" + "strings" + "testing" + "time" +) + +func TestCommitPackStreamsMetadataAndChunks(t *testing.T) { + var requestPath string 
+ var headerAgent string + var headerContentType string + var lines []string + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + requestPath = r.URL.Path + headerAgent = r.Header.Get("Code-Storage-Agent") + headerContentType = r.Header.Get("Content-Type") + lines = readNDJSONLines(t, r.Body) + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"commit":{"commit_sha":"abc123","tree_sha":"def456","target_branch":"main","pack_bytes":42,"blob_count":1},"result":{"branch":"main","old_sha":"0000000000000000000000000000000000000000","new_sha":"abc123","success":true,"status":"ok"}}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + builder, err := repo.CreateCommit(CommitOptions{ + TargetBranch: "main", + CommitMessage: "Update docs", + Author: CommitSignature{Name: "Author Name", Email: "author@example.com"}, + }) + if err != nil { + t.Fatalf("builder error: %v", err) + } + + builder, err = builder.AddFileFromString("docs/readme.md", "# v2.0.1\n- add streaming SDK\n", nil) + if err != nil { + t.Fatalf("add file error: %v", err) + } + builder, err = builder.DeletePath("docs/old.txt") + if err != nil { + t.Fatalf("delete path error: %v", err) + } + + result, err := builder.Send(nil) + if err != nil { + t.Fatalf("send error: %v", err) + } + + if requestPath != "/api/v1/repos/commit-pack" { + t.Fatalf("unexpected path: %s", requestPath) + } + if headerContentType != "application/x-ndjson" { + t.Fatalf("unexpected content type: %s", headerContentType) + } + if headerAgent == "" || !strings.Contains(headerAgent, "code-storage-go-sdk/") { + t.Fatalf("missing Code-Storage-Agent header") + } + if len(lines) != 2 { + t.Fatalf("expected 2 ndjson lines, got %d", len(lines)) + } + + var metadataEnvelope 
map[string]interface{} + if err := json.Unmarshal([]byte(lines[0]), &metadataEnvelope); err != nil { + t.Fatalf("decode metadata: %v", err) + } + metadata, ok := metadataEnvelope["metadata"].(map[string]interface{}) + if !ok { + t.Fatalf("missing metadata") + } + if metadata["commit_message"] != "Update docs" { + t.Fatalf("unexpected commit_message: %#v", metadata["commit_message"]) + } + if _, ok := metadata["ephemeral"]; ok { + t.Fatalf("did not expect ephemeral in metadata") + } + if _, ok := metadata["ephemeral_base"]; ok { + t.Fatalf("did not expect ephemeral_base in metadata") + } + + files, ok := metadata["files"].([]interface{}) + if !ok || len(files) != 2 { + t.Fatalf("expected two file entries") + } + + var contentID string + var sawUpsert bool + var sawDelete bool + for _, entry := range files { + fileEntry, ok := entry.(map[string]interface{}) + if !ok { + continue + } + switch fileEntry["operation"] { + case "upsert": + sawUpsert = true + contentID, _ = fileEntry["content_id"].(string) + if fileEntry["path"] != "docs/readme.md" { + t.Fatalf("unexpected upsert path: %#v", fileEntry["path"]) + } + case "delete": + sawDelete = true + if fileEntry["path"] != "docs/old.txt" { + t.Fatalf("unexpected delete path: %#v", fileEntry["path"]) + } + } + } + if !sawUpsert || !sawDelete { + t.Fatalf("expected upsert and delete operations") + } + if contentID == "" { + t.Fatalf("missing content_id") + } + + var chunkEnvelope map[string]interface{} + if err := json.Unmarshal([]byte(lines[1]), &chunkEnvelope); err != nil { + t.Fatalf("decode chunk: %v", err) + } + chunk, ok := chunkEnvelope["blob_chunk"].(map[string]interface{}) + if !ok { + t.Fatalf("missing blob_chunk") + } + if chunk["content_id"] != contentID { + t.Fatalf("content_id mismatch") + } + if eof, _ := chunk["eof"].(bool); !eof { + t.Fatalf("expected eof true") + } + data := decodeBase64(t, chunk["data"].(string)) + if string(data) != "# v2.0.1\n- add streaming SDK\n" { + t.Fatalf("unexpected chunk data: 
%s", string(data)) + } + + if result.CommitSHA != "abc123" || result.TreeSHA != "def456" { + t.Fatalf("unexpected result: %#v", result) + } + if result.RefUpdate.NewSHA != "abc123" { + t.Fatalf("unexpected ref update") + } +} + +func TestCommitPackIncludesBaseBranch(t *testing.T) { + var lines []string + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + lines = readNDJSONLines(t, r.Body) + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"commit":{"commit_sha":"deadbeef","tree_sha":"cafebabe","target_branch":"feature/one","pack_bytes":1,"blob_count":0},"result":{"branch":"feature/one","old_sha":"0000000000000000000000000000000000000000","new_sha":"deadbeef","success":true,"status":"ok"}}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + builder, err := repo.CreateCommit(CommitOptions{ + TargetBranch: "feature/one", + BaseBranch: "main", + ExpectedHeadSHA: "abc123", + CommitMessage: "branch off main", + Author: CommitSignature{Name: "Author Name", Email: "author@example.com"}, + }) + if err != nil { + t.Fatalf("builder error: %v", err) + } + builder, err = builder.AddFileFromString("docs/base.txt", "hello", nil) + if err != nil { + t.Fatalf("add file error: %v", err) + } + if _, err := builder.Send(nil); err != nil { + t.Fatalf("send error: %v", err) + } + + if len(lines) == 0 { + t.Fatalf("expected metadata line") + } + var metadataEnvelope map[string]interface{} + if err := json.Unmarshal([]byte(lines[0]), &metadataEnvelope); err != nil { + t.Fatalf("decode metadata: %v", err) + } + metadata := metadataEnvelope["metadata"].(map[string]interface{}) + if metadata["target_branch"] != "feature/one" { + t.Fatalf("unexpected target_branch") + } + if metadata["expected_head_sha"] != "abc123" { + 
t.Fatalf("unexpected expected_head_sha") + } + if metadata["base_branch"] != "main" { + t.Fatalf("unexpected base_branch") + } +} + +func TestCommitPackIncludesBaseBranchWithoutExpectedHead(t *testing.T) { + var lines []string + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + lines = readNDJSONLines(t, r.Body) + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"commit":{"commit_sha":"abc123","tree_sha":"def456","target_branch":"feature/one","pack_bytes":1,"blob_count":1},"result":{"branch":"feature/one","old_sha":"0000000000000000000000000000000000000000","new_sha":"abc123","success":true,"status":"ok"}}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + builder, err := repo.CreateCommit(CommitOptions{ + TargetBranch: "feature/one", + BaseBranch: "main", + CommitMessage: "branch off", + Author: CommitSignature{Name: "Author Name", Email: "author@example.com"}, + }) + if err != nil { + t.Fatalf("builder error: %v", err) + } + builder, err = builder.AddFileFromString("docs/base.txt", "hello", nil) + if err != nil { + t.Fatalf("add file error: %v", err) + } + if _, err := builder.Send(nil); err != nil { + t.Fatalf("send error: %v", err) + } + + var metadataEnvelope map[string]interface{} + if err := json.Unmarshal([]byte(lines[0]), &metadataEnvelope); err != nil { + t.Fatalf("decode metadata: %v", err) + } + metadata := metadataEnvelope["metadata"].(map[string]interface{}) + if metadata["base_branch"] != "main" { + t.Fatalf("unexpected base_branch") + } + if _, ok := metadata["expected_head_sha"]; ok { + t.Fatalf("did not expect expected_head_sha") + } +} + +func TestCommitPackIncludesEphemeralFlags(t *testing.T) { + var lines []string + server := httptest.NewServer(http.HandlerFunc(func(w 
http.ResponseWriter, r *http.Request) { + lines = readNDJSONLines(t, r.Body) + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"commit":{"commit_sha":"eph123","tree_sha":"eph456","target_branch":"feature/demo","pack_bytes":1,"blob_count":1},"result":{"branch":"feature/demo","old_sha":"0000000000000000000000000000000000000000","new_sha":"eph123","success":true,"status":"ok"}}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + builder, err := repo.CreateCommit(CommitOptions{ + TargetBranch: "feature/demo", + BaseBranch: "feature/base", + Ephemeral: true, + EphemeralBase: true, + CommitMessage: "ephemeral commit", + Author: CommitSignature{Name: "Author Name", Email: "author@example.com"}, + }) + if err != nil { + t.Fatalf("builder error: %v", err) + } + builder, err = builder.AddFileFromString("docs/ephemeral.txt", "hello", nil) + if err != nil { + t.Fatalf("add file error: %v", err) + } + if _, err := builder.Send(nil); err != nil { + t.Fatalf("send error: %v", err) + } + + var metadataEnvelope map[string]interface{} + if err := json.Unmarshal([]byte(lines[0]), &metadataEnvelope); err != nil { + t.Fatalf("decode metadata: %v", err) + } + metadata := metadataEnvelope["metadata"].(map[string]interface{}) + if metadata["ephemeral"] != true || metadata["ephemeral_base"] != true { + t.Fatalf("expected ephemeral flags") + } +} + +func TestCommitPackAcceptsReaderSources(t *testing.T) { + var lines []string + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + lines = readNDJSONLines(t, r.Body) + w.Header().Set("Content-Type", "application/json") + _, _ = 
w.Write([]byte(`{"commit":{"commit_sha":"feedbeef","tree_sha":"c0ffee42","target_branch":"main","pack_bytes":128,"blob_count":2},"result":{"branch":"main","old_sha":"0000000000000000000000000000000000000000","new_sha":"feedbeef","success":true,"status":"ok"}}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + builder, err := repo.CreateCommit(CommitOptions{ + TargetBranch: "main", + CommitMessage: "Add mixed sources", + Author: CommitSignature{Name: "Author Name", Email: "author@example.com"}, + }) + if err != nil { + t.Fatalf("builder error: %v", err) + } + builder, err = builder.AddFile("assets/blob.bin", []byte("blob-payload"), nil) + if err != nil { + t.Fatalf("add file error: %v", err) + } + builder, err = builder.AddFile("assets/stream.bin", strings.NewReader("streamed-payload"), nil) + if err != nil { + t.Fatalf("add file error: %v", err) + } + if _, err := builder.Send(nil); err != nil { + t.Fatalf("send error: %v", err) + } + + var metadataEnvelope map[string]interface{} + if err := json.Unmarshal([]byte(lines[0]), &metadataEnvelope); err != nil { + t.Fatalf("decode metadata: %v", err) + } + metadata := metadataEnvelope["metadata"].(map[string]interface{}) + files := metadata["files"].([]interface{}) + if len(files) != 2 { + t.Fatalf("expected two file entries") + } + + contentIDs := make(map[string]string, 2) + for _, entry := range files { + fileEntry := entry.(map[string]interface{}) + path := fileEntry["path"].(string) + contentIDs[path] = fileEntry["content_id"].(string) + } + + chunkFrames := lines[1:] + if len(chunkFrames) != 2 { + t.Fatalf("expected two chunk frames") + } + decoded := map[string]string{} + for _, frame := range chunkFrames { + var envelope map[string]interface{} + if err := json.Unmarshal([]byte(frame), &envelope); err != nil { + 
t.Fatalf("decode chunk: %v", err) + } + chunk := envelope["blob_chunk"].(map[string]interface{}) + contentID := chunk["content_id"].(string) + data := decodeBase64(t, chunk["data"].(string)) + decoded[contentID] = string(data) + } + + if decoded[contentIDs["assets/blob.bin"]] != "blob-payload" { + t.Fatalf("unexpected blob payload") + } + if decoded[contentIDs["assets/stream.bin"]] != "streamed-payload" { + t.Fatalf("unexpected stream payload") + } +} + +func TestCommitPackSplitsLargePayloads(t *testing.T) { + var lines []string + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + lines = readNDJSONLines(t, r.Body) + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"commit":{"commit_sha":"chunk123","tree_sha":"tree456","target_branch":"main","pack_bytes":4194314,"blob_count":1},"result":{"branch":"main","old_sha":"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa","new_sha":"chunk123","success":true,"status":"ok"}}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + payload := bytes.Repeat([]byte{'a'}, maxChunkBytes+10) + builder, err := repo.CreateCommit(CommitOptions{ + TargetBranch: "main", + CommitMessage: "Large commit", + Author: CommitSignature{Name: "Author Name", Email: "author@example.com"}, + }) + if err != nil { + t.Fatalf("builder error: %v", err) + } + builder, err = builder.AddFile("large.bin", payload, nil) + if err != nil { + t.Fatalf("add file error: %v", err) + } + if _, err := builder.Send(nil); err != nil { + t.Fatalf("send error: %v", err) + } + + if len(lines) != 3 { + t.Fatalf("expected 3 ndjson lines, got %d", len(lines)) + } + + var firstChunk map[string]interface{} + var secondChunk map[string]interface{} + _ = json.Unmarshal([]byte(lines[1]), &firstChunk) + _ = 
json.Unmarshal([]byte(lines[2]), &secondChunk) + + chunk1 := firstChunk["blob_chunk"].(map[string]interface{}) + chunk2 := secondChunk["blob_chunk"].(map[string]interface{}) + + decoded1 := decodeBase64(t, chunk1["data"].(string)) + decoded2 := decodeBase64(t, chunk2["data"].(string)) + + if len(decoded1) != maxChunkBytes { + t.Fatalf("unexpected first chunk size: %d", len(decoded1)) + } + if eof, _ := chunk1["eof"].(bool); eof { + t.Fatalf("unexpected eof true for first chunk") + } + if len(decoded2) != 10 { + t.Fatalf("unexpected second chunk size: %d", len(decoded2)) + } + if eof, _ := chunk2["eof"].(bool); !eof { + t.Fatalf("expected eof true for last chunk") + } +} + +func TestCommitPackMissingAuthor(t *testing.T) { + client, err := NewClient(Options{Name: "acme", Key: testKey}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + _, err = repo.CreateCommit(CommitOptions{ + TargetBranch: "main", + CommitMessage: "Missing author", + }) + if err == nil || !strings.Contains(err.Error(), "author name and email are required") { + t.Fatalf("expected missing author error, got %v", err) + } +} + +func TestCommitPackLegacyTargetRef(t *testing.T) { + var lines []string + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + lines = readNDJSONLines(t, r.Body) + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"commit":{"commit_sha":"legacy123","tree_sha":"legacy456","target_branch":"main","pack_bytes":0,"blob_count":0},"result":{"branch":"main","old_sha":"0000000000000000000000000000000000000000","new_sha":"legacy123","success":true,"status":"ok"}}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + builder, err := 
repo.CreateCommit(CommitOptions{ + TargetRef: "refs/heads/main", + CommitMessage: "Legacy path", + Author: CommitSignature{Name: "Legacy Author", Email: "legacy@example.com"}, + }) + if err != nil { + t.Fatalf("builder error: %v", err) + } + if _, err := builder.Send(nil); err != nil { + t.Fatalf("send error: %v", err) + } + + var metadataEnvelope map[string]interface{} + if err := json.Unmarshal([]byte(lines[0]), &metadataEnvelope); err != nil { + t.Fatalf("decode metadata: %v", err) + } + metadata := metadataEnvelope["metadata"].(map[string]interface{}) + if metadata["target_branch"] != "main" { + t.Fatalf("unexpected target_branch: %#v", metadata["target_branch"]) + } +} + +func TestCommitPackAcceptsBinaryBytes(t *testing.T) { + var lines []string + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + lines = readNDJSONLines(t, r.Body) + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"commit":{"commit_sha":"enc123","tree_sha":"treeenc","target_branch":"main","pack_bytes":12,"blob_count":1},"result":{"branch":"main","old_sha":"0000000000000000000000000000000000000000","new_sha":"enc123","success":true,"status":"ok"}}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + payload := []byte{0xa1, 'H', 'o', 'l', 'a', '!'} + builder, err := repo.CreateCommit(CommitOptions{ + TargetBranch: "main", + CommitMessage: "Add greeting", + Author: CommitSignature{Name: "Author Name", Email: "author@example.com"}, + }) + if err != nil { + t.Fatalf("builder error: %v", err) + } + builder, err = builder.AddFile("docs/hola.txt", payload, nil) + if err != nil { + t.Fatalf("add file error: %v", err) + } + if _, err := builder.Send(nil); err != nil { + t.Fatalf("send error: %v", err) + } + + var chunkEnvelope 
map[string]interface{} + if err := json.Unmarshal([]byte(lines[1]), &chunkEnvelope); err != nil { + t.Fatalf("decode chunk: %v", err) + } + chunk := chunkEnvelope["blob_chunk"].(map[string]interface{}) + decoded := decodeBase64(t, chunk["data"].(string)) + if !bytes.Equal(decoded, payload) { + t.Fatalf("unexpected binary payload") + } +} + +func TestCommitPackHonorsTTL(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + token := strings.TrimPrefix(r.Header.Get("Authorization"), "Bearer ") + claims := parseJWTFromToken(t, token) + exp := int64(claims["exp"].(float64)) + iat := int64(claims["iat"].(float64)) + if exp-iat != 4321 { + t.Fatalf("expected ttl 4321, got %d", exp-iat) + } + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"commit":{"commit_sha":"legacy123","tree_sha":"treetree","target_branch":"main","pack_bytes":16,"blob_count":1},"result":{"branch":"main","old_sha":"0000000000000000000000000000000000000000","new_sha":"legacy123","success":true,"status":"ok"}}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + builder, err := repo.CreateCommit(CommitOptions{ + InvocationOptions: InvocationOptions{TTL: 4321 * time.Second}, + TargetBranch: "main", + CommitMessage: "Legacy ttl commit", + Author: CommitSignature{Name: "Author Name", Email: "author@example.com"}, + }) + if err != nil { + t.Fatalf("builder error: %v", err) + } + builder, err = builder.AddFileFromString("docs/legacy.txt", "legacy ttl content", nil) + if err != nil { + t.Fatalf("add file error: %v", err) + } + if _, err := builder.Send(nil); err != nil { + t.Fatalf("send error: %v", err) + } +} + +func TestCommitPackRejectsBaseBranchWithRefsPrefix(t *testing.T) { + client, err := NewClient(Options{Name: "acme", 
Key: testKey}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + _, err = repo.CreateCommit(CommitOptions{ + TargetBranch: "feature/two", + BaseBranch: "refs/heads/main", + ExpectedHeadSHA: "abc123", + CommitMessage: "branch", + Author: CommitSignature{Name: "Author Name", Email: "author@example.com"}, + }) + if err == nil || !strings.Contains(err.Error(), "baseBranch must not include refs/") { + t.Fatalf("expected baseBranch validation error, got %v", err) + } +} + +func TestCommitPackReturnsRefUpdateErrorOnFailure(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"commit":{"commit_sha":"deadbeef","tree_sha":"feedbabe","target_branch":"main","pack_bytes":0,"blob_count":0},"result":{"branch":"main","old_sha":"1234567890123456789012345678901234567890","new_sha":"deadbeef","success":false,"status":"precondition_failed","message":"base mismatch"}}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + builder, err := repo.CreateCommit(CommitOptions{ + TargetBranch: "main", + CommitMessage: "bad commit", + Author: CommitSignature{Name: "Author Name", Email: "author@example.com"}, + }) + if err != nil { + t.Fatalf("builder error: %v", err) + } + builder, err = builder.AddFileFromString("docs/readme.md", "oops", nil) + if err != nil { + t.Fatalf("add file error: %v", err) + } + _, err = builder.Send(nil) + if err == nil { + t.Fatalf("expected error") + } + var refErr *RefUpdateError + if !errors.As(err, &refErr) { + t.Fatalf("expected RefUpdateError, got %T", err) + } +} diff --git a/packages/git-storage-sdk-go/diff_commit.go 
b/packages/git-storage-sdk-go/diff_commit.go index b99a87663..656ea9441 100644 --- a/packages/git-storage-sdk-go/diff_commit.go +++ b/packages/git-storage-sdk-go/diff_commit.go @@ -153,26 +153,25 @@ func buildDiffCommitMetadata(options CommitFromDiffOptions) *commitMetadataPaylo func writeDiffChunks(encoder *json.Encoder, reader io.Reader) error { buf := make([]byte, maxChunkBytes) - emitted := false + var pending []byte for { n, err := reader.Read(buf) if n > 0 { - payload := diffChunkEnvelope{ - DiffChunk: diffChunkPayload{ - Data: base64.StdEncoding.EncodeToString(buf[:n]), - EOF: err == io.EOF, - }, - } - emitted = true - if err := encoder.Encode(payload); err != nil { - return err - } - if err == io.EOF { - return nil + if pending != nil { + payload := diffChunkEnvelope{ + DiffChunk: diffChunkPayload{ + Data: base64.StdEncoding.EncodeToString(pending), + EOF: false, + }, + } + if err := encoder.Encode(payload); err != nil { + return err + } } + pending = append(pending[:0], buf[:n]...) 
} if err == io.EOF { - if !emitted { + if pending == nil { payload := diffChunkEnvelope{ DiffChunk: diffChunkPayload{ Data: "", @@ -181,7 +180,13 @@ func writeDiffChunks(encoder *json.Encoder, reader io.Reader) error { } return encoder.Encode(payload) } - return nil + payload := diffChunkEnvelope{ + DiffChunk: diffChunkPayload{ + Data: base64.StdEncoding.EncodeToString(pending), + EOF: true, + }, + } + return encoder.Encode(payload) } if err != nil { return err diff --git a/packages/git-storage-sdk-go/diff_commit_extended_test.go b/packages/git-storage-sdk-go/diff_commit_extended_test.go new file mode 100644 index 000000000..416e6355e --- /dev/null +++ b/packages/git-storage-sdk-go/diff_commit_extended_test.go @@ -0,0 +1,174 @@ +package storage + +import ( + "encoding/json" + "errors" + "net/http" + "net/http/httptest" + "strings" + "testing" +) + +func TestCommitFromDiffStreamsMetadataAndChunks(t *testing.T) { + var requestPath string + var headerAgent string + var headerContentType string + var lines []string + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + requestPath = r.URL.Path + headerAgent = r.Header.Get("Code-Storage-Agent") + headerContentType = r.Header.Get("Content-Type") + lines = readNDJSONLines(t, r.Body) + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"commit":{"commit_sha":"def456","tree_sha":"abc123","target_branch":"main","pack_bytes":84,"blob_count":0},"result":{"branch":"main","old_sha":"0000000000000000000000000000000000000000","new_sha":"def456","success":true,"status":"ok"}}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + result, err := repo.CreateCommitFromDiff(nil, CommitFromDiffOptions{ + TargetBranch: "main", + CommitMessage: "Apply patch", + ExpectedHeadSHA: 
"abc123", + Author: CommitSignature{Name: "Author Name", Email: "author@example.com"}, + Diff: "diff --git a/file.txt b/file.txt\n", + }) + if err != nil { + t.Fatalf("commit from diff error: %v", err) + } + + if requestPath != "/api/v1/repos/diff-commit" { + t.Fatalf("unexpected path: %s", requestPath) + } + if headerContentType != "application/x-ndjson" { + t.Fatalf("unexpected content type: %s", headerContentType) + } + if headerAgent == "" || !strings.Contains(headerAgent, "code-storage-go-sdk/") { + t.Fatalf("missing Code-Storage-Agent header") + } + if len(lines) != 2 { + t.Fatalf("expected 2 ndjson lines, got %d", len(lines)) + } + + var metadataEnvelope map[string]interface{} + if err := json.Unmarshal([]byte(lines[0]), &metadataEnvelope); err != nil { + t.Fatalf("decode metadata: %v", err) + } + metadata := metadataEnvelope["metadata"].(map[string]interface{}) + if metadata["target_branch"] != "main" { + t.Fatalf("unexpected target_branch: %#v", metadata["target_branch"]) + } + if metadata["expected_head_sha"] != "abc123" { + t.Fatalf("unexpected expected_head_sha") + } + if metadata["commit_message"] != "Apply patch" { + t.Fatalf("unexpected commit_message") + } + + var chunkEnvelope map[string]interface{} + if err := json.Unmarshal([]byte(lines[1]), &chunkEnvelope); err != nil { + t.Fatalf("decode chunk: %v", err) + } + chunk := chunkEnvelope["diff_chunk"].(map[string]interface{}) + if eof, _ := chunk["eof"].(bool); !eof { + t.Fatalf("expected eof true") + } + decoded := decodeBase64(t, chunk["data"].(string)) + if string(decoded) != "diff --git a/file.txt b/file.txt\n" { + t.Fatalf("unexpected diff payload") + } + + if result.CommitSHA != "def456" { + t.Fatalf("unexpected result: %#v", result) + } +} + +func TestCommitFromDiffRequiresDiff(t *testing.T) { + client, err := NewClient(Options{Name: "acme", Key: testKey}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + _, err = 
repo.CreateCommitFromDiff(nil, CommitFromDiffOptions{ + TargetBranch: "main", + CommitMessage: "Apply patch", + Author: CommitSignature{Name: "Author", Email: "author@example.com"}, + Diff: nil, + }) + if err == nil || !strings.Contains(err.Error(), "createCommitFromDiff diff is required") { + t.Fatalf("expected diff validation error, got %v", err) + } +} + +func TestCommitFromDiffErrorResponseReturnsRefUpdateError(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusConflict) + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"result":{"status":"conflict","message":"Head moved","branch":"main","old_sha":"abc","new_sha":"def"}}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + _, err = repo.CreateCommitFromDiff(nil, CommitFromDiffOptions{ + TargetBranch: "refs/heads/main", + CommitMessage: "Apply patch", + ExpectedHeadSHA: "abc", + Author: CommitSignature{Name: "Author", Email: "author@example.com"}, + Diff: "diff --git a/file.txt b/file.txt\n", + }) + if err == nil { + t.Fatalf("expected error") + } + + var refErr *RefUpdateError + if !errors.As(err, &refErr) { + t.Fatalf("expected RefUpdateError, got %T", err) + } + if refErr.Status != "conflict" { + t.Fatalf("unexpected status: %s", refErr.Status) + } + if refErr.RefUpdate == nil || refErr.RefUpdate.Branch != "main" { + t.Fatalf("unexpected ref update") + } +} + +func TestCommitFromDiffIncludesUserAgentHeader(t *testing.T) { + var headerAgent string + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + headerAgent = r.Header.Get("Code-Storage-Agent") + w.Header().Set("Content-Type", "application/json") + _, _ = 
w.Write([]byte(`{"commit":{"commit_sha":"useragent123","tree_sha":"tree456","target_branch":"main","pack_bytes":42,"blob_count":0},"result":{"branch":"main","old_sha":"0000000000000000000000000000000000000000","new_sha":"useragent123","success":true,"status":"ok"}}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + _, err = repo.CreateCommitFromDiff(nil, CommitFromDiffOptions{ + TargetBranch: "main", + CommitMessage: "Test user agent", + Author: CommitSignature{Name: "Author Name", Email: "author@example.com"}, + Diff: "diff --git a/test.txt b/test.txt\n", + }) + if err != nil { + t.Fatalf("commit from diff error: %v", err) + } + + if headerAgent == "" || !strings.Contains(headerAgent, "code-storage-go-sdk/") { + t.Fatalf("missing Code-Storage-Agent header") + } +} diff --git a/packages/git-storage-sdk-go/fetch.go b/packages/git-storage-sdk-go/fetch.go index 1ec3922f2..45bc8e5b8 100644 --- a/packages/git-storage-sdk-go/fetch.go +++ b/packages/git-storage-sdk-go/fetch.go @@ -96,7 +96,7 @@ func (f *apiFetcher) request(ctx context.Context, method string, path string, pa } if message == "" { - message = "Request " + method + " " + urlStr + " failed with status " + itoa(resp.StatusCode) + " " + resp.Status + message = "request " + method + " " + urlStr + " failed with status " + itoa(resp.StatusCode) + " " + resp.Status } return nil, &APIError{ diff --git a/packages/git-storage-sdk-go/helpers_test.go b/packages/git-storage-sdk-go/helpers_test.go index 637e40bd5..dfaad7f7e 100644 --- a/packages/git-storage-sdk-go/helpers_test.go +++ b/packages/git-storage-sdk-go/helpers_test.go @@ -1,6 +1,9 @@ package storage import ( + "bytes" + "encoding/base64" + "io" "net/url" "strings" "testing" @@ -52,3 +55,34 @@ func parseJWTFromToken(t *testing.T, token string) jwt.MapClaims { } 
return claims } + +func readNDJSONLines(t *testing.T, body io.Reader) []string { + t.Helper() + data, err := io.ReadAll(body) + if err != nil { + t.Fatalf("read ndjson body: %v", err) + } + data = bytes.TrimSpace(data) + if len(data) == 0 { + return nil + } + parts := bytes.Split(data, []byte("\n")) + lines := make([]string, len(parts)) + for i, part := range parts { + lines[i] = string(part) + } + return lines +} + +func decodeBase64(t *testing.T, value string) []byte { + t.Helper() + decoded, err := base64.StdEncoding.DecodeString(value) + if err != nil { + t.Fatalf("decode base64: %v", err) + } + return decoded +} + +func boolPtr(value bool) *bool { + return &value +} diff --git a/packages/git-storage-sdk-go/moon.yml b/packages/git-storage-sdk-go/moon.yml index 02bfa4cc0..c9f1f9092 100644 --- a/packages/git-storage-sdk-go/moon.yml +++ b/packages/git-storage-sdk-go/moon.yml @@ -9,6 +9,6 @@ tasks: test: command: go test ./... inputs: - - "**/*.go" + - '**/*.go' - go.mod - go.sum diff --git a/packages/git-storage-sdk-go/repo.go b/packages/git-storage-sdk-go/repo.go index d7a0ed3ea..e3167d4fc 100644 --- a/packages/git-storage-sdk-go/repo.go +++ b/packages/git-storage-sdk-go/repo.go @@ -5,6 +5,7 @@ import ( "errors" "net/http" "net/url" + "strconv" "strings" ) @@ -92,10 +93,10 @@ func (r *Repo) FileStream(ctx context.Context, options GetFileOptions) (*http.Re params.Set("ref", options.Ref) } if options.Ephemeral != nil { - params.Set("ephemeral", boolToString(*options.Ephemeral)) + params.Set("ephemeral", strconv.FormatBool(*options.Ephemeral)) } if options.EphemeralBase != nil { - params.Set("ephemeral_base", boolToString(*options.EphemeralBase)) + params.Set("ephemeral_base", strconv.FormatBool(*options.EphemeralBase)) } resp, err := r.client.api.get(ctx, "repos/file", params, jwtToken, nil) @@ -119,7 +120,7 @@ func (r *Repo) ListFiles(ctx context.Context, options ListFilesOptions) (ListFil params.Set("ref", options.Ref) } if options.Ephemeral != nil { - 
params.Set("ephemeral", boolToString(*options.Ephemeral)) + params.Set("ephemeral", strconv.FormatBool(*options.Ephemeral)) } if len(params) == 0 { params = nil @@ -403,10 +404,10 @@ func (r *Repo) GetBranchDiff(ctx context.Context, options GetBranchDiffOptions) params.Set("base", options.Base) } if options.Ephemeral != nil { - params.Set("ephemeral", boolToString(*options.Ephemeral)) + params.Set("ephemeral", strconv.FormatBool(*options.Ephemeral)) } if options.EphemeralBase != nil { - params.Set("ephemeral_base", boolToString(*options.EphemeralBase)) + params.Set("ephemeral_base", strconv.FormatBool(*options.EphemeralBase)) } for _, path := range options.Paths { if strings.TrimSpace(path) != "" { @@ -510,18 +511,18 @@ func (r *Repo) Grep(ctx context.Context, options GrepOptions) (GrepResult, error } } if options.Context != nil { - ctx := &grepContextPayload{} + contextPayload := &grepContextPayload{} hasCtx := false if options.Context.Before != nil { - ctx.Before = options.Context.Before + contextPayload.Before = options.Context.Before hasCtx = true } if options.Context.After != nil { - ctx.After = options.Context.After + contextPayload.After = options.Context.After hasCtx = true } if hasCtx { - body.Context = ctx + body.Context = contextPayload } } if options.Limits != nil { @@ -605,7 +606,7 @@ func (r *Repo) PullUpstream(ctx context.Context, options PullUpstreamOptions) er defer resp.Body.Close() if resp.StatusCode != 202 { - return errors.New("Pull Upstream failed: " + resp.Status) + return errors.New("pull upstream failed: " + resp.Status) } return nil } @@ -732,7 +733,7 @@ func (r *Repo) RestoreCommit(ctx context.Context, options RestoreCommitOptions) status = httpStatusToRestoreStatus(resp.StatusCode) } if message == "" { - message = "Restore commit failed with HTTP " + itoa(resp.StatusCode) + message = "restore commit failed with HTTP " + itoa(resp.StatusCode) } return RestoreCommitResult{}, newRefUpdateError(message, status, refUpdate) diff --git 
a/packages/git-storage-sdk-go/repo_test.go b/packages/git-storage-sdk-go/repo_test.go index fdb54da19..fe404ab26 100644 --- a/packages/git-storage-sdk-go/repo_test.go +++ b/packages/git-storage-sdk-go/repo_test.go @@ -2,6 +2,7 @@ package storage import ( "encoding/json" + "errors" "io" "net/http" "net/http/httptest" @@ -219,6 +220,447 @@ func TestCommitDiffQuery(t *testing.T) { } } -func boolPtr(value bool) *bool { +func TestRemoteURLPermissionsAndTTL(t *testing.T) { + client, err := NewClient(Options{Name: "acme", Key: testKey, StorageBaseURL: "acme.code.storage"}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo-1", DefaultBranch: "main", client: client} + + remote, err := repo.RemoteURL(nil, RemoteURLOptions{ + Permissions: []Permission{PermissionGitRead}, + TTL: 2 * time.Hour, + }) + if err != nil { + t.Fatalf("remote url error: %v", err) + } + claims := parseJWTFromURL(t, remote) + if claims["repo"] != "repo-1" { + t.Fatalf("expected repo claim") + } + scopes, ok := claims["scopes"].([]interface{}) + if !ok || len(scopes) != 1 || scopes[0] != "git:read" { + t.Fatalf("unexpected scopes") + } + exp := int64(claims["exp"].(float64)) + iat := int64(claims["iat"].(float64)) + if exp-iat != int64((2*time.Hour)/time.Second) { + t.Fatalf("unexpected ttl") + } +} + +func TestRemoteURLDefaultTTL(t *testing.T) { + client, err := NewClient(Options{Name: "acme", Key: testKey, StorageBaseURL: "acme.code.storage"}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo-1", DefaultBranch: "main", client: client} + + remote, err := repo.RemoteURL(nil, RemoteURLOptions{}) + if err != nil { + t.Fatalf("remote url error: %v", err) + } + claims := parseJWTFromURL(t, remote) + scopes, ok := claims["scopes"].([]interface{}) + if !ok || len(scopes) != 2 { + t.Fatalf("unexpected scopes") + } + if scopes[0] != "git:write" || scopes[1] != "git:read" { + t.Fatalf("unexpected default scopes") + } + exp := 
int64(claims["exp"].(float64)) + iat := int64(claims["iat"].(float64)) + if exp-iat != int64((365*24*time.Hour)/time.Second) { + t.Fatalf("unexpected default ttl") + } +} + +func TestListFilesTTL(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + token := strings.TrimPrefix(r.Header.Get("Authorization"), "Bearer ") + claims := parseJWTFromToken(t, token) + exp := int64(claims["exp"].(float64)) + iat := int64(claims["iat"].(float64)) + if exp-iat != 900 { + t.Fatalf("expected ttl 900, got %d", exp-iat) + } + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"paths":[],"ref":"main"}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + _, err = repo.ListFiles(nil, ListFilesOptions{InvocationOptions: InvocationOptions{TTL: 900 * time.Second}}) + if err != nil { + t.Fatalf("list files error: %v", err) + } +} + +func TestGrepResponseParsing(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"query":{"pattern":"SEARCHME","case_sensitive":false},"repo":{"ref":"main","commit":"deadbeef"},"matches":[{"path":"src/a.ts","lines":[{"line_number":12,"text":"SEARCHME","type":"match"}]}],"has_more":false}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + result, err := repo.Grep(nil, GrepOptions{ + Ref: "main", + Paths: []string{"src/"}, + Query: GrepQuery{Pattern: "SEARCHME", CaseSensitive: boolPtr(false)}, + Context: &GrepContext{ + Before: intPtr(1), + After: 
intPtr(2), + }, + Limits: &GrepLimits{ + MaxLines: intPtr(5), + MaxMatchesPerFile: intPtr(7), + }, + Pagination: &GrepPagination{ + Cursor: "abc", + Limit: intPtr(3), + }, + FileFilters: &GrepFileFilters{ + IncludeGlobs: []string{"**/*.ts"}, + ExcludeGlobs: []string{"**/vendor/**"}, + }, + }) + if err != nil { + t.Fatalf("grep error: %v", err) + } + if result.Query.Pattern != "SEARCHME" || result.Query.CaseSensitive == nil || *result.Query.CaseSensitive != false { + t.Fatalf("unexpected grep query") + } + if result.Repo.Commit != "deadbeef" { + t.Fatalf("unexpected repo commit") + } + if len(result.Matches) != 1 || result.Matches[0].Path != "src/a.ts" { + t.Fatalf("unexpected grep matches") + } +} + +func TestCreateBranchPayloadAndResponse(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + headerAgent := r.Header.Get("Code-Storage-Agent") + if headerAgent == "" || !strings.Contains(headerAgent, "code-storage-go-sdk/") { + t.Fatalf("missing Code-Storage-Agent header") + } + var body createBranchRequest + if err := json.NewDecoder(r.Body).Decode(&body); err != nil { + t.Fatalf("decode body: %v", err) + } + if body.BaseBranch != "main" || body.TargetBranch != "feature/demo" { + t.Fatalf("unexpected branch payload") + } + if !body.BaseIsEphemeral || !body.TargetIsEphemeral { + t.Fatalf("expected ephemeral flags") + } + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"message":"branch created","target_branch":"feature/demo","target_is_ephemeral":true,"commit_sha":"abc123"}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + result, err := repo.CreateBranch(nil, CreateBranchOptions{ + BaseBranch: "main", + TargetBranch: "feature/demo", + BaseIsEphemeral: true, + TargetIsEphemeral: true, 
+ }) + if err != nil { + t.Fatalf("create branch error: %v", err) + } + if result.TargetBranch != "feature/demo" || result.CommitSHA != "abc123" { + t.Fatalf("unexpected create branch result") + } +} + +func TestRestoreCommitSuccess(t *testing.T) { + var capturedBody map[string]interface{} + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + _ = json.NewDecoder(r.Body).Decode(&capturedBody) + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"commit":{"commit_sha":"abcdef0123456789abcdef0123456789abcdef01","tree_sha":"fedcba9876543210fedcba9876543210fedcba98","target_branch":"main","pack_bytes":1024},"result":{"branch":"main","old_sha":"0123456789abcdef0123456789abcdef01234567","new_sha":"89abcdef0123456789abcdef0123456789abcdef","success":true,"status":"ok"}}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + response, err := repo.RestoreCommit(nil, RestoreCommitOptions{ + TargetBranch: "main", + ExpectedHeadSHA: "main", + TargetCommitSHA: "0123456789abcdef0123456789abcdef01234567", + CommitMessage: "Restore \"feature\"", + Author: CommitSignature{ + Name: "Author Name", + Email: "author@example.com", + }, + Committer: &CommitSignature{ + Name: "Committer Name", + Email: "committer@example.com", + }, + }) + if err != nil { + t.Fatalf("restore commit error: %v", err) + } + if response.CommitSHA != "abcdef0123456789abcdef0123456789abcdef01" { + t.Fatalf("unexpected commit sha") + } + + metadataEnvelope, ok := capturedBody["metadata"].(map[string]interface{}) + if !ok { + t.Fatalf("missing metadata envelope") + } + if metadataEnvelope["target_branch"] != "main" { + t.Fatalf("unexpected target_branch") + } +} + +func TestRestoreCommitPreconditionFailed(t *testing.T) { + server := 
httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusPreconditionFailed) + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"commit":null,"result":{"success":false,"status":"precondition_failed","message":"expected head SHA mismatch"}}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + _, err = repo.RestoreCommit(nil, RestoreCommitOptions{ + TargetBranch: "main", + ExpectedHeadSHA: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + TargetCommitSHA: "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + Author: CommitSignature{Name: "Author", Email: "author@example.com"}, + }) + if err == nil { + t.Fatalf("expected error") + } + var refErr *RefUpdateError + if !errors.As(err, &refErr) { + t.Fatalf("expected RefUpdateError, got %T", err) + } + if refErr.Status != "precondition_failed" { + t.Fatalf("unexpected status: %s", refErr.Status) + } +} + +func TestRestoreCommitNotFound(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusNotFound) + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"error":"not found"}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + _, err = repo.RestoreCommit(nil, RestoreCommitOptions{ + TargetBranch: "main", + TargetCommitSHA: "0123456789abcdef0123456789abcdef01234567", + Author: CommitSignature{Name: "Author Name", Email: "author@example.com"}, + }) + if err == nil || !strings.Contains(err.Error(), "HTTP 404") { + t.Fatalf("expected HTTP 404 error, got %v", err) + 
} +} + +func TestNoteWriteAppendAndDelete(t *testing.T) { + var requests []map[string]interface{} + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + var payload map[string]interface{} + _ = json.NewDecoder(r.Body).Decode(&payload) + requests = append(requests, payload) + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"sha":"abc","target_ref":"refs/notes/commits","new_ref_sha":"def","result":{"success":true,"status":"ok"}}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + if _, err := repo.AppendNote(nil, AppendNoteOptions{SHA: "abc", Note: "note append"}); err != nil { + t.Fatalf("append note error: %v", err) + } + if _, err := repo.DeleteNote(nil, DeleteNoteOptions{SHA: "abc"}); err != nil { + t.Fatalf("delete note error: %v", err) + } + + if len(requests) != 2 { + t.Fatalf("expected two note requests") + } + if requests[0]["action"] != "append" { + t.Fatalf("expected append action") + } + if _, ok := requests[1]["action"]; ok { + t.Fatalf("did not expect action for delete") + } +} + +func TestGetNote(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + q := r.URL.Query() + if q.Get("sha") != "abc123" { + t.Fatalf("unexpected sha query: %s", q.Get("sha")) + } + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"sha":"abc123","note":"hello notes","ref_sha":"def456"}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + result, err := repo.GetNote(nil, GetNoteOptions{SHA: "abc123"}) + if err != nil { + t.Fatalf("get 
note error: %v", err) + } + if result.Note != "hello notes" || result.RefSHA != "def456" { + t.Fatalf("unexpected note result") + } +} + +func TestFileStreamEphemeral(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + q := r.URL.Query() + if q.Get("path") != "docs/readme.md" { + t.Fatalf("unexpected path") + } + if q.Get("ref") != "feature/demo" { + t.Fatalf("unexpected ref") + } + if q.Get("ephemeral") != "true" { + t.Fatalf("unexpected ephemeral") + } + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + flag := true + resp, err := repo.FileStream(nil, GetFileOptions{Path: "docs/readme.md", Ref: "feature/demo", Ephemeral: &flag}) + if err != nil { + t.Fatalf("file stream error: %v", err) + } + _ = resp.Body.Close() +} + +func TestFileStreamEphemeralBase(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + q := r.URL.Query() + if q.Get("ephemeral_base") != "true" { + t.Fatalf("unexpected ephemeral_base") + } + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + flag := true + resp, err := repo.FileStream(nil, GetFileOptions{Path: "docs/readme.md", EphemeralBase: &flag}) + if err != nil { + t.Fatalf("file stream error: %v", err) + } + _ = resp.Body.Close() +} + +func TestListCommitsDateParsing(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) 
{ + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"commits":[{"sha":"abc123","message":"feat: add endpoint","author_name":"Jane Doe","author_email":"jane@example.com","committer_name":"Jane Doe","committer_email":"jane@example.com","date":"2024-01-15T14:32:18Z"}],"has_more":false}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + result, err := repo.ListCommits(nil, ListCommitsOptions{}) + if err != nil { + t.Fatalf("list commits error: %v", err) + } + if len(result.Commits) != 1 { + t.Fatalf("expected one commit") + } + commit := result.Commits[0] + if commit.RawDate != "2024-01-15T14:32:18Z" { + t.Fatalf("unexpected raw date") + } + if commit.Date.IsZero() { + t.Fatalf("expected parsed date") + } +} + +func TestListCommitsUserAgentHeader(t *testing.T) { + var headerAgent string + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + headerAgent = r.Header.Get("Code-Storage-Agent") + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"commits":[],"has_more":false}`)) + })) + defer server.Close() + + client, err := NewClient(Options{Name: "acme", Key: testKey, APIBaseURL: server.URL}) + if err != nil { + t.Fatalf("client error: %v", err) + } + repo := &Repo{ID: "repo", DefaultBranch: "main", client: client} + + _, err = repo.ListCommits(nil, ListCommitsOptions{}) + if err != nil { + t.Fatalf("list commits error: %v", err) + } + if headerAgent == "" || !strings.Contains(headerAgent, "code-storage-go-sdk/") { + t.Fatalf("missing Code-Storage-Agent header") + } +} + +func intPtr(value int) *int { return &value } diff --git a/packages/git-storage-sdk-go/util.go b/packages/git-storage-sdk-go/util.go index 871e87777..733250420 100644 --- a/packages/git-storage-sdk-go/util.go 
+++ b/packages/git-storage-sdk-go/util.go @@ -13,13 +13,6 @@ func itoa(value int) string { return strconv.Itoa(value) } -func boolToString(value bool) string { - if value { - return "true" - } - return "false" -} - func decodeJSON(resp *http.Response, target interface{}) error { decoder := json.NewDecoder(resp.Body) return decoder.Decode(target) @@ -180,7 +173,7 @@ func parseNoteWriteResponse(resp *http.Response, method string) (NoteWriteResult } } - fallback := "Request " + method + " " + resp.Request.URL.String() + " failed with status " + strconv.Itoa(resp.StatusCode) + " " + resp.Status + fallback := "request " + method + " " + resp.Request.URL.String() + " failed with status " + strconv.Itoa(resp.StatusCode) + " " + resp.Status if len(rawBody) > 0 { text := strings.TrimSpace(string(rawBody)) if text != "" { diff --git a/packages/git-storage-sdk-go/version_test.go b/packages/git-storage-sdk-go/version_test.go new file mode 100644 index 000000000..822d7a0b5 --- /dev/null +++ b/packages/git-storage-sdk-go/version_test.go @@ -0,0 +1,29 @@ +package storage + +import "testing" + +func TestPackageName(t *testing.T) { + if PackageName == "" { + t.Fatalf("expected PackageName") + } + if PackageName != "code-storage-go-sdk" { + t.Fatalf("unexpected package name: %s", PackageName) + } +} + +func TestPackageVersion(t *testing.T) { + if PackageVersion == "" { + t.Fatalf("expected PackageVersion") + } +} + +func TestUserAgent(t *testing.T) { + agent := userAgent() + if agent == "" { + t.Fatalf("expected user agent") + } + expected := PackageName + "/" + PackageVersion + if agent != expected { + t.Fatalf("unexpected user agent: %s", agent) + } +} diff --git a/packages/git-storage-sdk-go/webhook.go b/packages/git-storage-sdk-go/webhook.go index 75fa139fb..acc455029 100644 --- a/packages/git-storage-sdk-go/webhook.go +++ b/packages/git-storage-sdk-go/webhook.go @@ -48,17 +48,17 @@ func ParseSignatureHeader(header string) *ParsedWebhookSignature { // ValidateWebhookSignature 
validates the HMAC signature and timestamp. func ValidateWebhookSignature(payload []byte, signatureHeader string, secret string, options WebhookValidationOptions) WebhookValidationResult { if strings.TrimSpace(secret) == "" { - return WebhookValidationResult{Valid: false, Error: "Empty secret is not allowed"} + return WebhookValidationResult{Valid: false, Error: "empty secret is not allowed"} } parsed := ParseSignatureHeader(signatureHeader) if parsed == nil { - return WebhookValidationResult{Valid: false, Error: "Invalid signature header format"} + return WebhookValidationResult{Valid: false, Error: "invalid signature header format"} } timestamp, err := strconv.ParseInt(parsed.Timestamp, 10, 64) if err != nil { - return WebhookValidationResult{Valid: false, Error: "Invalid timestamp in signature"} + return WebhookValidationResult{Valid: false, Error: "invalid timestamp in signature"} } maxAge := options.MaxAgeSeconds @@ -69,10 +69,10 @@ func ValidateWebhookSignature(payload []byte, signatureHeader string, secret str now := time.Now().Unix() age := now - timestamp if age > int64(maxAge) { - return WebhookValidationResult{Valid: false, Error: "Webhook timestamp too old (" + strconv.FormatInt(age, 10) + " seconds)", Timestamp: timestamp} + return WebhookValidationResult{Valid: false, Error: "webhook timestamp too old (" + strconv.FormatInt(age, 10) + " seconds)", Timestamp: timestamp} } if age < -60 { - return WebhookValidationResult{Valid: false, Error: "Webhook timestamp is in the future", Timestamp: timestamp} + return WebhookValidationResult{Valid: false, Error: "webhook timestamp is in the future", Timestamp: timestamp} } } @@ -82,11 +82,11 @@ func ValidateWebhookSignature(payload []byte, signatureHeader string, secret str expected := mac.Sum(nil) provided, err := hex.DecodeString(parsed.Signature) if err != nil { - return WebhookValidationResult{Valid: false, Error: "Invalid signature", Timestamp: timestamp} + return WebhookValidationResult{Valid: false, Error: 
"invalid signature", Timestamp: timestamp} } if len(expected) != len(provided) || !hmac.Equal(expected, provided) { - return WebhookValidationResult{Valid: false, Error: "Invalid signature", Timestamp: timestamp} + return WebhookValidationResult{Valid: false, Error: "invalid signature", Timestamp: timestamp} } return WebhookValidationResult{Valid: true, Timestamp: timestamp} @@ -99,7 +99,7 @@ func ValidateWebhook(payload []byte, headers http.Header, secret string, options signatureHeader = headers.Get("x-pierre-signature") } if signatureHeader == "" { - return WebhookValidation{WebhookValidationResult: WebhookValidationResult{Valid: false, Error: "Missing or invalid X-Pierre-Signature header"}} + return WebhookValidation{WebhookValidationResult: WebhookValidationResult{Valid: false, Error: "missing or invalid X-Pierre-Signature header"}} } eventType := headers.Get("X-Pierre-Event") @@ -107,7 +107,7 @@ func ValidateWebhook(payload []byte, headers http.Header, secret string, options eventType = headers.Get("x-pierre-event") } if eventType == "" { - return WebhookValidation{WebhookValidationResult: WebhookValidationResult{Valid: false, Error: "Missing or invalid X-Pierre-Event header"}} + return WebhookValidation{WebhookValidationResult: WebhookValidationResult{Valid: false, Error: "missing or invalid X-Pierre-Event header"}} } validation := ValidateWebhookSignature(payload, signatureHeader, secret, options) @@ -120,7 +120,7 @@ func ValidateWebhook(payload []byte, headers http.Header, secret string, options var raw json.RawMessage if err := json.Unmarshal(payload, &raw); err != nil { validation.Valid = false - validation.Error = "Invalid JSON payload" + validation.Error = "invalid JSON payload" return WebhookValidation{WebhookValidationResult: validation} } @@ -153,7 +153,7 @@ func convertWebhookPayload(eventType string, payload []byte) (WebhookEventPayloa return WebhookEventPayload{}, err } if raw.Repository.ID == "" || raw.Repository.URL == "" || raw.Ref == "" || 
raw.Before == "" || raw.After == "" || raw.CustomerID == "" || raw.PushedAt == "" { - return WebhookEventPayload{}, errors.New("Invalid push payload") + return WebhookEventPayload{}, errors.New("invalid push payload") } return WebhookEventPayload{Push: &WebhookPushEvent{ Type: "push", diff --git a/packages/git-storage-sdk-go/webhook_test.go b/packages/git-storage-sdk-go/webhook_test.go new file mode 100644 index 000000000..d47f21b13 --- /dev/null +++ b/packages/git-storage-sdk-go/webhook_test.go @@ -0,0 +1,204 @@ +package storage + +import ( + "crypto/hmac" + "crypto/sha256" + "encoding/hex" + "net/http" + "strconv" + "strings" + "testing" + "time" +) + +func TestParseSignatureHeader(t *testing.T) { + header := "t=1234567890,sha256=abcdef123456" + result := ParseSignatureHeader(header) + if result == nil || result.Timestamp != "1234567890" || result.Signature != "abcdef123456" { + t.Fatalf("unexpected signature header parse") + } + + result = ParseSignatureHeader("t=1234567890, sha256=abcdef123456") + if result == nil || result.Signature != "abcdef123456" { + t.Fatalf("expected signature with spaces") + } + + if ParseSignatureHeader("") != nil { + t.Fatalf("expected nil for empty header") + } + if ParseSignatureHeader("invalid") != nil { + t.Fatalf("expected nil for invalid header") + } + if ParseSignatureHeader("t=123") != nil { + t.Fatalf("expected nil for missing signature") + } + if ParseSignatureHeader("sha256=abc") != nil { + t.Fatalf("expected nil for missing timestamp") + } + if ParseSignatureHeader("timestamp=123,signature=abc") != nil { + t.Fatalf("expected nil for wrong keys") + } + + header = "t=1234567890,sha256=abcdef123456,v1=ignored" + result = ParseSignatureHeader(header) + if result == nil || result.Signature != "abcdef123456" { + t.Fatalf("expected signature with extra fields") + } +} + +func TestValidateWebhookSignature(t *testing.T) { + secret := "test_webhook_secret_key_123" + payload := 
[]byte(`{"repository":{"id":"repo","url":"https://git.example.com/org/repo"},"ref":"main","before":"abc","after":"def","customer_id":"cust","pushed_at":"2024-01-20T10:30:00Z"}`) + stamp := time.Now().Unix() + header := buildSignatureHeader(t, payload, secret, stamp) + + result := ValidateWebhookSignature(payload, header, secret, WebhookValidationOptions{}) + if !result.Valid || result.Timestamp != stamp { + t.Fatalf("expected valid signature") + } + + invalidHeader := buildSignatureHeader(t, payload, "wrong_secret", stamp) + result = ValidateWebhookSignature(payload, invalidHeader, secret, WebhookValidationOptions{}) + if result.Valid || result.Error != "invalid signature" { + t.Fatalf("expected invalid signature") + } + + oldStamp := time.Now().Add(-400 * time.Second).Unix() + header = buildSignatureHeader(t, payload, secret, oldStamp) + result = ValidateWebhookSignature(payload, header, secret, WebhookValidationOptions{}) + if result.Valid || !strings.Contains(result.Error, "webhook timestamp too old") { + t.Fatalf("expected old timestamp error") + } + + futureStamp := time.Now().Add(120 * time.Second).Unix() + header = buildSignatureHeader(t, payload, secret, futureStamp) + result = ValidateWebhookSignature(payload, header, secret, WebhookValidationOptions{}) + if result.Valid || result.Error != "webhook timestamp is in the future" { + t.Fatalf("expected future timestamp error") + } + + stamp = time.Now().Add(-60 * time.Second).Unix() + header = buildSignatureHeader(t, payload, secret, stamp) + result = ValidateWebhookSignature(payload, header, secret, WebhookValidationOptions{MaxAgeSeconds: 30}) + if result.Valid { + t.Fatalf("expected signature to be too old") + } + result = ValidateWebhookSignature(payload, header, secret, WebhookValidationOptions{MaxAgeSeconds: 120}) + if !result.Valid { + t.Fatalf("expected signature to be valid with relaxed max age") + } + + result = ValidateWebhookSignature(payload, "invalid_header", secret, WebhookValidationOptions{}) + 
if result.Valid || result.Error != "invalid signature header format" { + t.Fatalf("expected invalid header format error") + } + + result = ValidateWebhookSignature(payload, "t=not_a_number,sha256=abcdef", secret, WebhookValidationOptions{}) + if result.Valid || result.Error != "invalid timestamp in signature" { + t.Fatalf("expected invalid timestamp error") + } + + modified := []byte(strings.ReplaceAll(string(payload), "main", "master")) + result = ValidateWebhookSignature(modified, header, secret, WebhookValidationOptions{}) + if result.Valid { + t.Fatalf("expected modified payload to fail") + } + + result = ValidateWebhookSignature(append(payload, ' '), header, secret, WebhookValidationOptions{}) + if result.Valid { + t.Fatalf("expected whitespace payload to fail") + } +} + +func TestValidateWebhook(t *testing.T) { + secret := "test_webhook_secret_key_123" + payload := []byte(`{"repository":{"id":"repo_abc123","url":"https://git.example.com/org/repo"},"ref":"main","before":"abc123","after":"def456","customer_id":"cust_123","pushed_at":"2024-01-20T10:30:00Z"}`) + stamp := time.Now().Unix() + header := buildSignatureHeader(t, payload, secret, stamp) + + headers := http.Header{} + headers.Set("x-pierre-signature", header) + headers.Set("x-pierre-event", "push") + + result := ValidateWebhook(payload, headers, secret, WebhookValidationOptions{}) + if !result.Valid || result.EventType != "push" { + t.Fatalf("expected valid webhook") + } + if result.Payload == nil || result.Payload.Push == nil { + t.Fatalf("expected push payload") + } + if result.Payload.Push.CustomerID != "cust_123" || result.Payload.Push.Repository.ID != "repo_abc123" { + t.Fatalf("unexpected push payload contents") + } + + uppercase := http.Header{} + uppercase.Set("X-Pierre-Signature", header) + uppercase.Set("X-Pierre-Event", "push") + result = ValidateWebhook(payload, uppercase, secret, WebhookValidationOptions{}) + if !result.Valid { + t.Fatalf("expected valid webhook with uppercase headers") + } 
+ + missingSig := http.Header{} + missingSig.Set("x-pierre-event", "push") + result = ValidateWebhook(payload, missingSig, secret, WebhookValidationOptions{}) + if result.Valid || result.Error != "missing or invalid X-Pierre-Signature header" { + t.Fatalf("expected missing signature error") + } + + missingEvent := http.Header{} + missingEvent.Set("x-pierre-signature", header) + result = ValidateWebhook(payload, missingEvent, secret, WebhookValidationOptions{}) + if result.Valid || result.Error != "missing or invalid X-Pierre-Event header" { + t.Fatalf("expected missing event error") + } + + invalidJSON := []byte("not valid json") + badHeader := buildSignatureHeader(t, invalidJSON, secret, stamp) + badHeaders := http.Header{} + badHeaders.Set("x-pierre-signature", badHeader) + badHeaders.Set("x-pierre-event", "push") + result = ValidateWebhook(invalidJSON, badHeaders, secret, WebhookValidationOptions{}) + if result.Valid || result.Error != "invalid JSON payload" { + t.Fatalf("expected invalid JSON payload error") + } + + wrongSig := buildSignatureHeader(t, payload, "wrong_secret", stamp) + wrongHeaders := http.Header{} + wrongHeaders.Set("x-pierre-signature", wrongSig) + wrongHeaders.Set("x-pierre-event", "push") + result = ValidateWebhook(payload, wrongHeaders, secret, WebhookValidationOptions{}) + if result.Valid || result.Error != "invalid signature" { + t.Fatalf("expected invalid signature error") + } +} + +func TestWebhookEmptyInputs(t *testing.T) { + secret := "test_webhook_secret_key_123" + payload := []byte(`{"repository":{"id":"repo","url":"https://git.example.com/org/repo"},"ref":"main","before":"abc","after":"def","customer_id":"cust","pushed_at":"2024-01-20T10:30:00Z"}`) + stamp := time.Now().Unix() + header := buildSignatureHeader(t, payload, secret, stamp) + + result := ValidateWebhookSignature([]byte{}, header, secret, WebhookValidationOptions{}) + if result.Valid { + t.Fatalf("expected empty payload to fail") + } + + result = 
ValidateWebhookSignature(payload, header, "", WebhookValidationOptions{}) + if result.Valid || result.Error != "empty secret is not allowed" { + t.Fatalf("expected empty secret error") + } + + result = ValidateWebhookSignature(payload, "", secret, WebhookValidationOptions{}) + if result.Valid || result.Error != "invalid signature header format" { + t.Fatalf("expected empty header error") + } +} + +func buildSignatureHeader(t *testing.T, payload []byte, secret string, timestamp int64) string { + t.Helper() + mac := hmac.New(sha256.New, []byte(secret)) + mac.Write([]byte(strconv.FormatInt(timestamp, 10) + "." + string(payload))) + signature := hex.EncodeToString(mac.Sum(nil)) + return "t=" + strconv.FormatInt(timestamp, 10) + ",sha256=" + signature +} diff --git a/packages/git-storage-sdk-node/AGENTS.md b/packages/git-storage-sdk-node/AGENTS.md index 30f91cad2..20bf273f7 100644 --- a/packages/git-storage-sdk-node/AGENTS.md +++ b/packages/git-storage-sdk-node/AGENTS.md @@ -1,14 +1,15 @@ # Git Storage SDK – Agent Notes -This package (`@pierre/storage`) is published publicly and ships to external customers. Treat the -repository as production-critical: follow semver expectations, avoid breaking changes without -coordination, and keep documentation in sync with code. +This package (`@pierre/storage`) is published publicly and ships to external +customers. Treat the repository as production-critical: follow semver +expectations, avoid breaking changes without coordination, and keep +documentation in sync with code. ## Package Purpose - TypeScript/ESM + CommonJS SDK for Pierre’s Git storage APIs. -- Generates authenticated Git remote URLs and wraps REST endpoints for repo management, diff - retrieval, commit packs, and branch restore operations. +- Generates authenticated Git remote URLs and wraps REST endpoints for repo + management, diff retrieval, commit packs, and branch restore operations. - Distributed via npm; consumers rely on the generated `dist` output. 
## Build & Test @@ -17,8 +18,8 @@ coordination, and keep documentation in sync with code. - Tests: `pnpm --filter @pierre/storage exec vitest --run`. - Full end-to-end smoke test hitting Pierre environments: `node packages/git-storage-sdk/tests/full-workflow.js -e production -s pierre -k /home/ian/pierre-prod-key.pem` - (change the `-e`, `-s`, and key path for your setup). The script provisions a repo, commits - changes, and exercises diff/list APIs via the SDK. + (change the `-e`, `-s`, and key path for your setup). The script provisions a + repo, commits changes, and exercises diff/list APIs via the SDK. ## Key Files @@ -31,34 +32,40 @@ coordination, and keep documentation in sync with code. ## Development Notes - `resolveCommitTtlSeconds` default TTL = 1 hour unless overridden. -- Use `DEFAULT_TOKEN_TTL_SECONDS` for 1-hour defaults (avoid hard-coded `1 * 60 * 60`). -- `Repo.restoreCommit` streams metadata to `repos/restore-commit`. Legacy `restore-commits` and - `reset-commits` endpoints remain deployed but the SDK no longer auto-falls back; callers must hit - those routes explicitly if needed. -- Commit builder (`createCommit().send()`) and `Repo.restoreCommit` throw `RefUpdateError` when the - backend rejects a ref update; keep status/reason/message/ref mapping intact. -- `Repo.createCommitFromDiff` streams pre-generated patches to `repos/diff-commit` (accepts the diff - payload directly and returns a `Promise`). It shares the same `RefUpdateError` - semantics—reuse the commit-pack helpers when adjusting error handling. +- Use `DEFAULT_TOKEN_TTL_SECONDS` for 1-hour defaults (avoid hard-coded + `1 * 60 * 60`). +- `Repo.restoreCommit` streams metadata to `repos/restore-commit`. Legacy + `restore-commits` and `reset-commits` endpoints remain deployed but the SDK no + longer auto-falls back; callers must hit those routes explicitly if needed. 
+- Commit builder (`createCommit().send()`) and `Repo.restoreCommit` throw + `RefUpdateError` when the backend rejects a ref update; keep + status/reason/message/ref mapping intact. +- `Repo.createCommitFromDiff` streams pre-generated patches to + `repos/diff-commit` (accepts the diff payload directly and returns a + `Promise`). It shares the same `RefUpdateError` semantics—reuse + the commit-pack helpers when adjusting error handling. - Authentication relies on ES256 private key; see README for sample key. -- When adjusting request/response shapes, reflect changes in both TypeScript types and README. -- Avoid importing Node built-ins that break browser usage; the SDK is intended for Node + edge - runtimes with fetch available. -- Maintain the Result vs Response distinction: raw API payloads remain `*Response` while SDK - consumers receive camelCase `*Result` objects. Update both transformer utilities and docs - together. -- Diff responses normalize the Git status via `normalizeDiffState`, exposing both `state` and - `rawState`; extend that mapping instead of passing raw enums through directly. -- Webhook validation returns typed push events (parsed `Date`, camelCase fields) or a - `WebhookUnknownEvent` fallback—keep this discriminated union intact when adding new events. -- Commit and commit-list APIs convert timestamps to `Date` while preserving `rawDate`; apply the - same pattern to future time fields. -- Commit builder accepts `Blob`, `File`, `ReadableStream`, and iterable sources; new sources should - be funneled through `toAsyncIterable`/`ensureUint8Array`. -- `CommitFileOptions.mode` is restricted to `GitFileMode` literals; ensure additional modes are - codified there. -- `CommitTextFileOptions.encoding` supports Node `Buffer` encodings and defaults to UTF-8; retain - the Buffer-based fallback for non-UTF encodings. +- When adjusting request/response shapes, reflect changes in both TypeScript + types and README. 
+- Avoid importing Node built-ins that break browser usage; the SDK is intended + for Node + edge runtimes with fetch available. +- Maintain the Result vs Response distinction: raw API payloads remain + `*Response` while SDK consumers receive camelCase `*Result` objects. Update + both transformer utilities and docs together. +- Diff responses normalize the Git status via `normalizeDiffState`, exposing + both `state` and `rawState`; extend that mapping instead of passing raw enums + through directly. +- Webhook validation returns typed push events (parsed `Date`, camelCase fields) + or a `WebhookUnknownEvent` fallback—keep this discriminated union intact when + adding new events. +- Commit and commit-list APIs convert timestamps to `Date` while preserving + `rawDate`; apply the same pattern to future time fields. +- Commit builder accepts `Blob`, `File`, `ReadableStream`, and iterable sources; + new sources should be funneled through `toAsyncIterable`/`ensureUint8Array`. +- `CommitFileOptions.mode` is restricted to `GitFileMode` literals; ensure + additional modes are codified there. +- `CommitTextFileOptions.encoding` supports Node `Buffer` encodings and defaults + to UTF-8; retain the Buffer-based fallback for non-UTF encodings. ## Release Checklist diff --git a/packages/git-storage-sdk-node/README.md b/packages/git-storage-sdk-node/README.md index 76eb3774d..d9a3602b1 100644 --- a/packages/git-storage-sdk-node/README.md +++ b/packages/git-storage-sdk-node/README.md @@ -6,9 +6,10 @@ Pierre Git Storage SDK for TypeScript/JavaScript applications. - `node packages/git-storage-sdk/tests/full-workflow.js -e production -s pierre -k /home/ian/pierre-prod-key.pem` Drives - the Pierre workflow via the SDK: creates a repository, writes commits, fetches branch and diff - data, and confirms storage APIs. Swap in your own private key path when running outside this - workstation and adjust `-e`/`-s` for non-production environments. 
+ the Pierre workflow via the SDK: creates a repository, writes commits, fetches + branch and diff data, and confirms storage APIs. Swap in your own private key + path when running outside this workstation and adjust `-e`/`-s` for + non-production environments. ## Installation @@ -99,8 +100,8 @@ const readOnlyUrl = await repo.getRemoteURL({ #### Ephemeral Branches -For working with ephemeral branches (temporary branches isolated from the main repository), use -`getEphemeralRemote()`: +For working with ephemeral branches (temporary branches isolated from the main +repository), use `getEphemeralRemote()`: ```typescript // Get ephemeral namespace remote URL @@ -109,7 +110,9 @@ const ephemeralUrl = await repo.getEphemeralRemoteURL(); // Configure separate remotes for default and ephemeral branches console.log(`Run: git remote add origin ${await repo.getRemoteURL()}`); -console.log(`Run: git remote add ephemeral ${await repo.getEphemeralRemoteURL()}`); +console.log( + `Run: git remote add ephemeral ${await repo.getEphemeralRemoteURL()}` +); // Push ephemeral branch // git push ephemeral feature-branch @@ -224,10 +227,11 @@ console.log(result.refUpdate.oldSha); // All zeroes when the ref is created The builder exposes: -- `addFile(path, source, options)` to attach bytes from strings, typed arrays, ArrayBuffers, `Blob` - or `File` objects, `ReadableStream`s, or iterable/async-iterable sources. -- `addFileFromString(path, contents, options)` for text helpers (defaults to UTF-8 and accepts any - Node.js `BufferEncoding`). +- `addFile(path, source, options)` to attach bytes from strings, typed arrays, + ArrayBuffers, `Blob` or `File` objects, `ReadableStream`s, or + iterable/async-iterable sources. +- `addFileFromString(path, contents, options)` for text helpers (defaults to + UTF-8 and accepts any Node.js `BufferEncoding`). - `deletePath(path)` to remove files or folders. - `send()` to finalize the commit and receive metadata about the new commit. 
@@ -248,39 +252,46 @@ type CommitResult = { }; ``` -If the backend reports a failure (for example, the branch advanced past `expectedHeadSha`) the -builder throws a `RefUpdateError` containing the status, reason, and ref details. +If the backend reports a failure (for example, the branch advanced past +`expectedHeadSha`) the builder throws a `RefUpdateError` containing the status, +reason, and ref details. **Options** -- `targetBranch` (required): Branch name (for example `main`) that will receive the commit. -- `expectedHeadSha` (optional): Commit SHA that must match the remote tip; omit to fast-forward - unconditionally. -- `baseBranch` (optional): Mirrors the `base_branch` metadata and names an existing branch whose tip - should seed `targetBranch` if it does not exist. Leave `expectedHeadSha` empty when creating a new - branch from `baseBranch`; when both are provided and the branch already exists, `expectedHeadSha` +- `targetBranch` (required): Branch name (for example `main`) that will receive + the commit. +- `expectedHeadSha` (optional): Commit SHA that must match the remote tip; omit + to fast-forward unconditionally. +- `baseBranch` (optional): Mirrors the `base_branch` metadata and names an + existing branch whose tip should seed `targetBranch` if it does not exist. + Leave `expectedHeadSha` empty when creating a new branch from `baseBranch`; + when both are provided and the branch already exists, `expectedHeadSha` continues to enforce the fast-forward guard. - `commitMessage` (required): The commit message. - `author` (required): Include `name` and `email` for the commit author. -- `committer` (optional): Include `name` and `email`. If omitted, the author identity is reused. +- `committer` (optional): Include `name` and `email`. If omitted, the author + identity is reused. - `signal` (optional): Abort an in-flight upload with `AbortController`. -- `targetRef` (deprecated, optional): Fully qualified ref (for example `refs/heads/main`). 
Prefer - `targetBranch`, which now accepts plain branch names. +- `targetRef` (deprecated, optional): Fully qualified ref (for example + `refs/heads/main`). Prefer `targetBranch`, which now accepts plain branch + names. -> Files are chunked into 4 MiB segments under the hood, so you can stream large assets without -> buffering them entirely in memory. File paths are normalized relative to the repository root. +> Files are chunked into 4 MiB segments under the hood, so you can stream large +> assets without buffering them entirely in memory. File paths are normalized +> relative to the repository root. -> The `targetBranch` must already exist on the remote repository unless you provide `baseBranch` (or -> the repository has no refs). To seed an empty repository, point to the default branch and omit -> `expectedHeadSha`. To create a missing branch within an existing repository, set `baseBranch` to -> the source branch and omit `expectedHeadSha` so the service clones that tip before applying your -> changes. +> The `targetBranch` must already exist on the remote repository unless you +> provide `baseBranch` (or the repository has no refs). To seed an empty +> repository, point to the default branch and omit `expectedHeadSha`. To create +> a missing branch within an existing repository, set `baseBranch` to the source +> branch and omit `expectedHeadSha` so the service clones that tip before +> applying your changes. ### Apply a pre-generated diff -If you already have a patch (for example, the output of `git diff --binary`) you can stream it to -the gateway with a single call. The SDK handles chunking and NDJSON streaming just like it does for -regular commits: +If you already have a patch (for example, the output of `git diff --binary`) you +can stream it to the gateway with a single call. 
The SDK handles chunking and +NDJSON streaming just like it does for regular commits: ```ts const fs = await import('node:fs/promises'); @@ -299,15 +310,16 @@ console.log(diffResult.commitSha); console.log(diffResult.refUpdate.newSha); ``` -The `diff` field accepts a `string`, `Uint8Array`, `ArrayBuffer`, `Blob`, `File`, `ReadableStream`, -iterable, or async iterable of byte chunks—the same sources supported by the standard commit -builder. `createCommitFromDiff` returns a `Promise` and throws a `RefUpdateError` when -the server rejects the diff (for example, if the branch tip changed). +The `diff` field accepts a `string`, `Uint8Array`, `ArrayBuffer`, `Blob`, +`File`, `ReadableStream`, iterable, or async iterable of byte chunks—the same +sources supported by the standard commit builder. `createCommitFromDiff` returns +a `Promise` and throws a `RefUpdateError` when the server rejects +the diff (for example, if the branch tip changed). ### Streaming Large Files -The commit builder accepts any async iterable of bytes, so you can stream large assets without -buffering: +The commit builder accepts any async iterable of bytes, so you can stream large +assets without buffering: ```typescript import { createReadStream } from 'node:fs'; @@ -639,14 +651,15 @@ interface RestoreCommitResult { ## Authentication -The SDK uses JWT (JSON Web Tokens) for authentication. When you call `getRemoteURL()`, it: +The SDK uses JWT (JSON Web Tokens) for authentication. When you call +`getRemoteURL()`, it: 1. Creates a JWT with your name, repository ID, and requested permissions 2. Signs it with your key 3. Embeds it in the Git remote URL as the password -The generated URLs are compatible with standard Git clients and include all necessary -authentication. +The generated URLs are compatible with standard Git clients and include all +necessary authentication. 
## Error Handling @@ -667,9 +680,9 @@ try { - ``` -- Mutating operations (commit builder, `restoreCommit`) throw `RefUpdateError` when the backend - reports a ref failure. Inspect `error.status`, `error.reason`, `error.message`, and - `error.refUpdate` for details. +- Mutating operations (commit builder, `restoreCommit`) throw `RefUpdateError` + when the backend reports a ref failure. Inspect `error.status`, + `error.reason`, `error.message`, and `error.refUpdate` for details. ## License diff --git a/packages/git-storage-sdk-node/package.json b/packages/git-storage-sdk-node/package.json index c280e2ce9..72999d06c 100644 --- a/packages/git-storage-sdk-node/package.json +++ b/packages/git-storage-sdk-node/package.json @@ -1,40 +1,40 @@ { - "name": "@pierre/storage", - "version": "0.9.3", - "description": "Pierre Git Storage SDK", - "license": "MIT", - "type": "module", - "main": "./dist/index.cjs", - "module": "./dist/index.js", - "types": "./dist/index.d.ts", - "exports": { - ".": { - "types": "./dist/index.d.ts", - "import": "./dist/index.js", - "require": "./dist/index.cjs", - "default": "./dist/index.js" - } - }, - "files": [ - "dist", - "src" - ], - "scripts": { - "build": "tsup", - "dev": "tsup --watch", - "prepublishOnly": "pnpm build" - }, - "dependencies": { - "jose": "^5.10.0", - "snakecase-keys": "^9.0.2", - "zod": "^3.23.8" - }, - "devDependencies": { - "tsup": "8.5.0", - "typescript": "5.8.3", - "vitest": "3.2.4" - }, - "publishConfig": { - "access": "public" - } + "name": "@pierre/storage", + "version": "0.9.3", + "description": "Pierre Git Storage SDK", + "license": "MIT", + "type": "module", + "main": "./dist/index.cjs", + "module": "./dist/index.js", + "types": "./dist/index.d.ts", + "exports": { + ".": { + "types": "./dist/index.d.ts", + "import": "./dist/index.js", + "require": "./dist/index.cjs", + "default": "./dist/index.js" + } + }, + "files": [ + "dist", + "src" + ], + "scripts": { + "build": "tsup", + "dev": "tsup --watch", + 
"prepublishOnly": "pnpm build" + }, + "dependencies": { + "jose": "^5.10.0", + "snakecase-keys": "^9.0.2", + "zod": "^3.23.8" + }, + "devDependencies": { + "tsup": "8.5.0", + "typescript": "5.8.3", + "vitest": "3.2.4" + }, + "publishConfig": { + "access": "public" + } } diff --git a/packages/git-storage-sdk-node/src/commit-pack.ts b/packages/git-storage-sdk-node/src/commit-pack.ts index 5576190ed..508ddff56 100644 --- a/packages/git-storage-sdk-node/src/commit-pack.ts +++ b/packages/git-storage-sdk-node/src/commit-pack.ts @@ -1,4 +1,4 @@ -import { inferRefUpdateReason, RefUpdateError } from './errors'; +import { RefUpdateError, inferRefUpdateReason } from './errors'; import type { CommitPackAckRaw } from './schemas'; import { commitPackResponseSchema, errorEnvelopeSchema } from './schemas'; import type { CommitResult, RefUpdate } from './types'; @@ -6,123 +6,127 @@ import type { CommitResult, RefUpdate } from './types'; export type CommitPackAck = CommitPackAckRaw; export function buildCommitResult(ack: CommitPackAckRaw): CommitResult { - const refUpdate = toRefUpdate(ack.result); - if (!ack.result.success) { - throw new RefUpdateError( - ack.result.message ?? `Commit failed with status ${ack.result.status}`, - { - status: ack.result.status, - message: ack.result.message, - refUpdate, - }, - ); - } - return { - commitSha: ack.commit.commit_sha, - treeSha: ack.commit.tree_sha, - targetBranch: ack.commit.target_branch, - packBytes: ack.commit.pack_bytes, - blobCount: ack.commit.blob_count, - refUpdate, - }; + const refUpdate = toRefUpdate(ack.result); + if (!ack.result.success) { + throw new RefUpdateError( + ack.result.message ?? 
`Commit failed with status ${ack.result.status}`, + { + status: ack.result.status, + message: ack.result.message, + refUpdate, + } + ); + } + return { + commitSha: ack.commit.commit_sha, + treeSha: ack.commit.tree_sha, + targetBranch: ack.commit.target_branch, + packBytes: ack.commit.pack_bytes, + blobCount: ack.commit.blob_count, + refUpdate, + }; } export function toRefUpdate(result: CommitPackAckRaw['result']): RefUpdate { - return { - branch: result.branch, - oldSha: result.old_sha, - newSha: result.new_sha, - }; + return { + branch: result.branch, + oldSha: result.old_sha, + newSha: result.new_sha, + }; } export async function parseCommitPackError( - response: Response, - fallbackMessage: string, + response: Response, + fallbackMessage: string ): Promise<{ - statusMessage: string; - statusLabel: string; - refUpdate?: Partial; + statusMessage: string; + statusLabel: string; + refUpdate?: Partial; }> { - const cloned = response.clone(); - let jsonBody: unknown; - try { - jsonBody = await cloned.json(); - } catch { - jsonBody = undefined; - } + const cloned = response.clone(); + let jsonBody: unknown; + try { + jsonBody = await cloned.json(); + } catch { + jsonBody = undefined; + } - let textBody: string | undefined; - if (jsonBody === undefined) { - try { - textBody = await response.text(); - } catch { - textBody = undefined; - } - } + let textBody: string | undefined; + if (jsonBody === undefined) { + try { + textBody = await response.text(); + } catch { + textBody = undefined; + } + } - const defaultStatus = (() => { - const inferred = inferRefUpdateReason(String(response.status)); - return inferred === 'unknown' ? 'failed' : inferred; - })(); - let statusLabel = defaultStatus; - let refUpdate: Partial | undefined; - let message: string | undefined; + const defaultStatus = (() => { + const inferred = inferRefUpdateReason(String(response.status)); + return inferred === 'unknown' ? 
'failed' : inferred; + })(); + let statusLabel = defaultStatus; + let refUpdate: Partial | undefined; + let message: string | undefined; - if (jsonBody !== undefined) { - const parsedResponse = commitPackResponseSchema.safeParse(jsonBody); - if (parsedResponse.success) { - const result = parsedResponse.data.result; - if (typeof result.status === 'string' && result.status.trim() !== '') { - statusLabel = result.status.trim() as typeof statusLabel; - } - refUpdate = toPartialRefUpdateFields(result.branch, result.old_sha, result.new_sha); - if (typeof result.message === 'string' && result.message.trim() !== '') { - message = result.message.trim(); - } - } + if (jsonBody !== undefined) { + const parsedResponse = commitPackResponseSchema.safeParse(jsonBody); + if (parsedResponse.success) { + const result = parsedResponse.data.result; + if (typeof result.status === 'string' && result.status.trim() !== '') { + statusLabel = result.status.trim() as typeof statusLabel; + } + refUpdate = toPartialRefUpdateFields( + result.branch, + result.old_sha, + result.new_sha + ); + if (typeof result.message === 'string' && result.message.trim() !== '') { + message = result.message.trim(); + } + } - if (!message) { - const parsedError = errorEnvelopeSchema.safeParse(jsonBody); - if (parsedError.success) { - const trimmed = parsedError.data.error.trim(); - if (trimmed) { - message = trimmed; - } - } - } - } + if (!message) { + const parsedError = errorEnvelopeSchema.safeParse(jsonBody); + if (parsedError.success) { + const trimmed = parsedError.data.error.trim(); + if (trimmed) { + message = trimmed; + } + } + } + } - if (!message && typeof jsonBody === 'string' && jsonBody.trim() !== '') { - message = jsonBody.trim(); - } + if (!message && typeof jsonBody === 'string' && jsonBody.trim() !== '') { + message = jsonBody.trim(); + } - if (!message && textBody && textBody.trim() !== '') { - message = textBody.trim(); - } + if (!message && textBody && textBody.trim() !== '') { + message = 
textBody.trim(); + } - return { - statusMessage: message ?? fallbackMessage, - statusLabel, - refUpdate, - }; + return { + statusMessage: message ?? fallbackMessage, + statusLabel, + refUpdate, + }; } function toPartialRefUpdateFields( - branch?: string | null, - oldSha?: string | null, - newSha?: string | null, + branch?: string | null, + oldSha?: string | null, + newSha?: string | null ): Partial | undefined { - const refUpdate: Partial = {}; + const refUpdate: Partial = {}; - if (typeof branch === 'string' && branch.trim() !== '') { - refUpdate.branch = branch.trim(); - } - if (typeof oldSha === 'string' && oldSha.trim() !== '') { - refUpdate.oldSha = oldSha.trim(); - } - if (typeof newSha === 'string' && newSha.trim() !== '') { - refUpdate.newSha = newSha.trim(); - } + if (typeof branch === 'string' && branch.trim() !== '') { + refUpdate.branch = branch.trim(); + } + if (typeof oldSha === 'string' && oldSha.trim() !== '') { + refUpdate.oldSha = oldSha.trim(); + } + if (typeof newSha === 'string' && newSha.trim() !== '') { + refUpdate.newSha = newSha.trim(); + } - return Object.keys(refUpdate).length > 0 ? refUpdate : undefined; + return Object.keys(refUpdate).length > 0 ? 
refUpdate : undefined; } diff --git a/packages/git-storage-sdk-node/src/commit.ts b/packages/git-storage-sdk-node/src/commit.ts index 30ed6ed48..5536fa111 100644 --- a/packages/git-storage-sdk-node/src/commit.ts +++ b/packages/git-storage-sdk-node/src/commit.ts @@ -3,22 +3,22 @@ import { RefUpdateError } from './errors'; import type { CommitPackAckRaw } from './schemas'; import { commitPackAckSchema } from './schemas'; import { - base64Encode, - type ChunkSegment, - chunkify, - requiresDuplex, - toAsyncIterable, - toRequestBody, + type ChunkSegment, + base64Encode, + chunkify, + requiresDuplex, + toAsyncIterable, + toRequestBody, } from './stream-utils'; import type { - CommitBuilder, - CommitFileOptions, - CommitFileSource, - CommitResult, - CommitSignature, - CommitTextFileOptions, - CreateCommitOptions, - LegacyCreateCommitOptions, + CommitBuilder, + CommitFileOptions, + CommitFileSource, + CommitResult, + CommitSignature, + CommitTextFileOptions, + CreateCommitOptions, + LegacyCreateCommitOptions, } from './types'; import { getUserAgent } from './version'; @@ -27,408 +27,416 @@ const HEADS_REF_PREFIX = 'refs/heads/'; type NodeBuffer = Uint8Array & { toString(encoding?: string): string }; interface NodeBufferConstructor { - from(data: Uint8Array): NodeBuffer; - from(data: string, encoding?: string): NodeBuffer; - isBuffer(value: unknown): value is NodeBuffer; + from(data: Uint8Array): NodeBuffer; + from(data: string, encoding?: string): NodeBuffer; + isBuffer(value: unknown): value is NodeBuffer; } const BufferCtor: NodeBufferConstructor | undefined = ( - globalThis as { Buffer?: NodeBufferConstructor } + globalThis as { Buffer?: NodeBufferConstructor } ).Buffer; interface CommitMetadataPayload { - target_branch: string; - expected_head_sha?: string; - base_branch?: string; - commit_message: string; - ephemeral?: boolean; - ephemeral_base?: boolean; - author: { - name: string; - email: string; - }; - committer?: { - name: string; - email: string; - }; - files: 
Array<{ - path: string; - content_id: string; - operation?: 'upsert' | 'delete'; - mode?: string; - }>; + target_branch: string; + expected_head_sha?: string; + base_branch?: string; + commit_message: string; + ephemeral?: boolean; + ephemeral_base?: boolean; + author: { + name: string; + email: string; + }; + committer?: { + name: string; + email: string; + }; + files: Array<{ + path: string; + content_id: string; + operation?: 'upsert' | 'delete'; + mode?: string; + }>; } interface CommitTransportRequest { - authorization: string; - signal?: AbortSignal; - metadata: CommitMetadataPayload; - blobs: Array<{ contentId: string; chunks: AsyncIterable }>; + authorization: string; + signal?: AbortSignal; + metadata: CommitMetadataPayload; + blobs: Array<{ contentId: string; chunks: AsyncIterable }>; } interface CommitTransport { - send(request: CommitTransportRequest): Promise; + send(request: CommitTransportRequest): Promise; } type NormalizedCommitOptions = { - targetBranch: string; - commitMessage: string; - expectedHeadSha?: string; - baseBranch?: string; - ephemeral?: boolean; - ephemeralBase?: boolean; - author: CommitSignature; - committer?: CommitSignature; - signal?: AbortSignal; - ttl?: number; + targetBranch: string; + commitMessage: string; + expectedHeadSha?: string; + baseBranch?: string; + ephemeral?: boolean; + ephemeralBase?: boolean; + author: CommitSignature; + committer?: CommitSignature; + signal?: AbortSignal; + ttl?: number; }; interface CommitBuilderDeps { - options: CreateCommitOptions; - getAuthToken: () => Promise; - transport: CommitTransport; + options: CreateCommitOptions; + getAuthToken: () => Promise; + transport: CommitTransport; } type FileOperationState = { - path: string; - contentId: string; - mode?: string; - operation: 'upsert' | 'delete'; - streamFactory?: () => AsyncIterable; + path: string; + contentId: string; + mode?: string; + operation: 'upsert' | 'delete'; + streamFactory?: () => AsyncIterable; }; export class 
CommitBuilderImpl implements CommitBuilder { - private readonly options: NormalizedCommitOptions; - private readonly getAuthToken: () => Promise; - private readonly transport: CommitTransport; - private readonly operations: FileOperationState[] = []; - private sent = false; - - constructor(deps: CommitBuilderDeps) { - this.options = normalizeCommitOptions(deps.options); - this.getAuthToken = deps.getAuthToken; - this.transport = deps.transport; - - const trimmedMessage = this.options.commitMessage?.trim(); - const trimmedAuthorName = this.options.author?.name?.trim(); - const trimmedAuthorEmail = this.options.author?.email?.trim(); - - if (!trimmedMessage) { - throw new Error('createCommit commitMessage is required'); - } - if (!trimmedAuthorName || !trimmedAuthorEmail) { - throw new Error('createCommit author name and email are required'); - } - this.options.commitMessage = trimmedMessage; - this.options.author = { - name: trimmedAuthorName, - email: trimmedAuthorEmail, - }; - if (typeof this.options.expectedHeadSha === 'string') { - this.options.expectedHeadSha = this.options.expectedHeadSha.trim(); - } - if (typeof this.options.baseBranch === 'string') { - const trimmedBase = this.options.baseBranch.trim(); - if (trimmedBase === '') { - delete this.options.baseBranch; - } else { - if (trimmedBase.startsWith('refs/')) { - throw new Error('createCommit baseBranch must not include refs/ prefix'); - } - this.options.baseBranch = trimmedBase; - } - } - - if (this.options.ephemeralBase && !this.options.baseBranch) { - throw new Error('createCommit ephemeralBase requires baseBranch'); - } - } - - addFile(path: string, source: CommitFileSource, options?: CommitFileOptions): CommitBuilder { - this.ensureNotSent(); - const normalizedPath = this.normalizePath(path); - const contentId = randomContentId(); - const mode = options?.mode ?? 
'100644'; - - this.operations.push({ - path: normalizedPath, - contentId, - mode, - operation: 'upsert', - streamFactory: () => toAsyncIterable(source), - }); - - return this; - } - - addFileFromString( - path: string, - contents: string, - options?: CommitTextFileOptions, - ): CommitBuilder { - const encoding = options?.encoding ?? 'utf8'; - const normalizedEncoding = encoding === 'utf-8' ? 'utf8' : encoding; - let data: Uint8Array; - if (normalizedEncoding === 'utf8') { - data = new TextEncoder().encode(contents); - } else if (BufferCtor) { - data = BufferCtor.from( - contents, - normalizedEncoding as Parameters[1], - ); - } else { - throw new Error( - `Unsupported encoding "${encoding}" in this environment. Non-UTF encodings require Node.js Buffer support.`, - ); - } - return this.addFile(path, data, options); - } - - deletePath(path: string): CommitBuilder { - this.ensureNotSent(); - const normalizedPath = this.normalizePath(path); - this.operations.push({ - path: normalizedPath, - contentId: randomContentId(), - operation: 'delete', - }); - return this; - } - - async send(): Promise { - this.ensureNotSent(); - this.sent = true; - - const metadata = this.buildMetadata(); - const blobEntries = this.operations - .filter((op) => op.operation === 'upsert' && op.streamFactory) - .map((op) => ({ - contentId: op.contentId, - chunks: chunkify(op.streamFactory!()), - })); - - const authorization = await this.getAuthToken(); - const ack = await this.transport.send({ - authorization, - signal: this.options.signal, - metadata, - blobs: blobEntries, - }); - return buildCommitResult(ack); - } - - private buildMetadata(): CommitMetadataPayload { - const files = this.operations.map((op) => { - const entry: CommitMetadataPayload['files'][number] = { - path: op.path, - content_id: op.contentId, - operation: op.operation, - }; - if (op.mode) { - entry.mode = op.mode; - } - return entry; - }); - - const metadata: CommitMetadataPayload = { - target_branch: 
this.options.targetBranch, - commit_message: this.options.commitMessage, - author: { - name: this.options.author.name, - email: this.options.author.email, - }, - files, - }; - - if (this.options.expectedHeadSha) { - metadata.expected_head_sha = this.options.expectedHeadSha; - } - if (this.options.baseBranch) { - metadata.base_branch = this.options.baseBranch; - } - if (this.options.committer) { - metadata.committer = { - name: this.options.committer.name, - email: this.options.committer.email, - }; - } - - if (this.options.ephemeral) { - metadata.ephemeral = true; - } - if (this.options.ephemeralBase) { - metadata.ephemeral_base = true; - } - - return metadata; - } - - private ensureNotSent(): void { - if (this.sent) { - throw new Error('createCommit builder cannot be reused after send()'); - } - } - - private normalizePath(path: string): string { - if (!path || typeof path !== 'string' || path.trim() === '') { - throw new Error('File path must be a non-empty string'); - } - return path.replace(/^\//, ''); - } + private readonly options: NormalizedCommitOptions; + private readonly getAuthToken: () => Promise; + private readonly transport: CommitTransport; + private readonly operations: FileOperationState[] = []; + private sent = false; + + constructor(deps: CommitBuilderDeps) { + this.options = normalizeCommitOptions(deps.options); + this.getAuthToken = deps.getAuthToken; + this.transport = deps.transport; + + const trimmedMessage = this.options.commitMessage?.trim(); + const trimmedAuthorName = this.options.author?.name?.trim(); + const trimmedAuthorEmail = this.options.author?.email?.trim(); + + if (!trimmedMessage) { + throw new Error('createCommit commitMessage is required'); + } + if (!trimmedAuthorName || !trimmedAuthorEmail) { + throw new Error('createCommit author name and email are required'); + } + this.options.commitMessage = trimmedMessage; + this.options.author = { + name: trimmedAuthorName, + email: trimmedAuthorEmail, + }; + if (typeof 
this.options.expectedHeadSha === 'string') { + this.options.expectedHeadSha = this.options.expectedHeadSha.trim(); + } + if (typeof this.options.baseBranch === 'string') { + const trimmedBase = this.options.baseBranch.trim(); + if (trimmedBase === '') { + delete this.options.baseBranch; + } else { + if (trimmedBase.startsWith('refs/')) { + throw new Error( + 'createCommit baseBranch must not include refs/ prefix' + ); + } + this.options.baseBranch = trimmedBase; + } + } + + if (this.options.ephemeralBase && !this.options.baseBranch) { + throw new Error('createCommit ephemeralBase requires baseBranch'); + } + } + + addFile( + path: string, + source: CommitFileSource, + options?: CommitFileOptions + ): CommitBuilder { + this.ensureNotSent(); + const normalizedPath = this.normalizePath(path); + const contentId = randomContentId(); + const mode = options?.mode ?? '100644'; + + this.operations.push({ + path: normalizedPath, + contentId, + mode, + operation: 'upsert', + streamFactory: () => toAsyncIterable(source), + }); + + return this; + } + + addFileFromString( + path: string, + contents: string, + options?: CommitTextFileOptions + ): CommitBuilder { + const encoding = options?.encoding ?? 'utf8'; + const normalizedEncoding = encoding === 'utf-8' ? 'utf8' : encoding; + let data: Uint8Array; + if (normalizedEncoding === 'utf8') { + data = new TextEncoder().encode(contents); + } else if (BufferCtor) { + data = BufferCtor.from( + contents, + normalizedEncoding as Parameters[1] + ); + } else { + throw new Error( + `Unsupported encoding "${encoding}" in this environment. 
Non-UTF encodings require Node.js Buffer support.` + ); + } + return this.addFile(path, data, options); + } + + deletePath(path: string): CommitBuilder { + this.ensureNotSent(); + const normalizedPath = this.normalizePath(path); + this.operations.push({ + path: normalizedPath, + contentId: randomContentId(), + operation: 'delete', + }); + return this; + } + + async send(): Promise { + this.ensureNotSent(); + this.sent = true; + + const metadata = this.buildMetadata(); + const blobEntries = this.operations + .filter((op) => op.operation === 'upsert' && op.streamFactory) + .map((op) => ({ + contentId: op.contentId, + chunks: chunkify(op.streamFactory!()), + })); + + const authorization = await this.getAuthToken(); + const ack = await this.transport.send({ + authorization, + signal: this.options.signal, + metadata, + blobs: blobEntries, + }); + return buildCommitResult(ack); + } + + private buildMetadata(): CommitMetadataPayload { + const files = this.operations.map((op) => { + const entry: CommitMetadataPayload['files'][number] = { + path: op.path, + content_id: op.contentId, + operation: op.operation, + }; + if (op.mode) { + entry.mode = op.mode; + } + return entry; + }); + + const metadata: CommitMetadataPayload = { + target_branch: this.options.targetBranch, + commit_message: this.options.commitMessage, + author: { + name: this.options.author.name, + email: this.options.author.email, + }, + files, + }; + + if (this.options.expectedHeadSha) { + metadata.expected_head_sha = this.options.expectedHeadSha; + } + if (this.options.baseBranch) { + metadata.base_branch = this.options.baseBranch; + } + if (this.options.committer) { + metadata.committer = { + name: this.options.committer.name, + email: this.options.committer.email, + }; + } + + if (this.options.ephemeral) { + metadata.ephemeral = true; + } + if (this.options.ephemeralBase) { + metadata.ephemeral_base = true; + } + + return metadata; + } + + private ensureNotSent(): void { + if (this.sent) { + throw new 
Error('createCommit builder cannot be reused after send()'); + } + } + + private normalizePath(path: string): string { + if (!path || typeof path !== 'string' || path.trim() === '') { + throw new Error('File path must be a non-empty string'); + } + return path.replace(/^\//, ''); + } } export class FetchCommitTransport implements CommitTransport { - private readonly url: string; - - constructor(config: { baseUrl: string; version: number }) { - const trimmedBase = config.baseUrl.replace(/\/+$/, ''); - this.url = `${trimmedBase}/api/v${config.version}/repos/commit-pack`; - } - - async send(request: CommitTransportRequest): Promise { - const bodyIterable = buildMessageIterable(request.metadata, request.blobs); - const body = toRequestBody(bodyIterable); - - const init: RequestInit = { - method: 'POST', - headers: { - Authorization: `Bearer ${request.authorization}`, - 'Content-Type': 'application/x-ndjson', - Accept: 'application/json', - 'Code-Storage-Agent': getUserAgent(), - }, - body: body as any, - signal: request.signal, - }; - - if (requiresDuplex(body)) { - (init as RequestInit & { duplex: 'half' }).duplex = 'half'; - } - - const response = await fetch(this.url, init); - - if (!response.ok) { - const fallbackMessage = `createCommit request failed (${response.status} ${response.statusText})`; - const { statusMessage, statusLabel, refUpdate } = await parseCommitPackError( - response, - fallbackMessage, - ); - throw new RefUpdateError(statusMessage, { - status: statusLabel, - message: statusMessage, - refUpdate, - }); - } - - const ack = commitPackAckSchema.parse(await response.json()); - return ack; - } + private readonly url: string; + + constructor(config: { baseUrl: string; version: number }) { + const trimmedBase = config.baseUrl.replace(/\/+$/, ''); + this.url = `${trimmedBase}/api/v${config.version}/repos/commit-pack`; + } + + async send(request: CommitTransportRequest): Promise { + const bodyIterable = buildMessageIterable(request.metadata, 
request.blobs); + const body = toRequestBody(bodyIterable); + + const init: RequestInit = { + method: 'POST', + headers: { + Authorization: `Bearer ${request.authorization}`, + 'Content-Type': 'application/x-ndjson', + Accept: 'application/json', + 'Code-Storage-Agent': getUserAgent(), + }, + body: body as any, + signal: request.signal, + }; + + if (requiresDuplex(body)) { + (init as RequestInit & { duplex: 'half' }).duplex = 'half'; + } + + const response = await fetch(this.url, init); + + if (!response.ok) { + const fallbackMessage = `createCommit request failed (${response.status} ${response.statusText})`; + const { statusMessage, statusLabel, refUpdate } = + await parseCommitPackError(response, fallbackMessage); + throw new RefUpdateError(statusMessage, { + status: statusLabel, + message: statusMessage, + refUpdate, + }); + } + + const ack = commitPackAckSchema.parse(await response.json()); + return ack; + } } function buildMessageIterable( - metadata: CommitMetadataPayload, - blobs: Array<{ contentId: string; chunks: AsyncIterable }>, + metadata: CommitMetadataPayload, + blobs: Array<{ contentId: string; chunks: AsyncIterable }> ): AsyncIterable { - const encoder = new TextEncoder(); - return { - async *[Symbol.asyncIterator]() { - yield encoder.encode(`${JSON.stringify({ metadata })}\n`); - for (const blob of blobs) { - for await (const segment of blob.chunks) { - const payload = { - blob_chunk: { - content_id: blob.contentId, - data: base64Encode(segment.chunk), - eof: segment.eof, - }, - }; - yield encoder.encode(`${JSON.stringify(payload)}\n`); - } - } - }, - }; + const encoder = new TextEncoder(); + return { + async *[Symbol.asyncIterator]() { + yield encoder.encode(`${JSON.stringify({ metadata })}\n`); + for (const blob of blobs) { + for await (const segment of blob.chunks) { + const payload = { + blob_chunk: { + content_id: blob.contentId, + data: base64Encode(segment.chunk), + eof: segment.eof, + }, + }; + yield 
encoder.encode(`${JSON.stringify(payload)}\n`); + } + } + }, + }; } function randomContentId(): string { - const cryptoObj = globalThis.crypto; - if (cryptoObj && typeof cryptoObj.randomUUID === 'function') { - return cryptoObj.randomUUID(); - } - const random = Math.random().toString(36).slice(2); - return `cid-${Date.now().toString(36)}-${random}`; + const cryptoObj = globalThis.crypto; + if (cryptoObj && typeof cryptoObj.randomUUID === 'function') { + return cryptoObj.randomUUID(); + } + const random = Math.random().toString(36).slice(2); + return `cid-${Date.now().toString(36)}-${random}`; } -function normalizeCommitOptions(options: CreateCommitOptions): NormalizedCommitOptions { - return { - targetBranch: resolveTargetBranch(options), - commitMessage: options.commitMessage, - expectedHeadSha: options.expectedHeadSha, - baseBranch: options.baseBranch, - ephemeral: options.ephemeral === true, - ephemeralBase: options.ephemeralBase === true, - author: options.author, - committer: options.committer, - signal: options.signal, - ttl: options.ttl, - }; +function normalizeCommitOptions( + options: CreateCommitOptions +): NormalizedCommitOptions { + return { + targetBranch: resolveTargetBranch(options), + commitMessage: options.commitMessage, + expectedHeadSha: options.expectedHeadSha, + baseBranch: options.baseBranch, + ephemeral: options.ephemeral === true, + ephemeralBase: options.ephemeralBase === true, + author: options.author, + committer: options.committer, + signal: options.signal, + ttl: options.ttl, + }; } function resolveTargetBranch(options: CreateCommitOptions): string { - const branchCandidate = - typeof options.targetBranch === 'string' ? 
options.targetBranch.trim() : ''; - if (branchCandidate) { - return normalizeBranchName(branchCandidate); - } - if (hasLegacyTargetRef(options)) { - return normalizeLegacyTargetRef(options.targetRef); - } - throw new Error('createCommit targetBranch is required'); + const branchCandidate = + typeof options.targetBranch === 'string' ? options.targetBranch.trim() : ''; + if (branchCandidate) { + return normalizeBranchName(branchCandidate); + } + if (hasLegacyTargetRef(options)) { + return normalizeLegacyTargetRef(options.targetRef); + } + throw new Error('createCommit targetBranch is required'); } function normalizeBranchName(value: string): string { - const trimmed = value.trim(); - if (!trimmed) { - throw new Error('createCommit targetBranch is required'); - } - if (trimmed.startsWith(HEADS_REF_PREFIX)) { - const branch = trimmed.slice(HEADS_REF_PREFIX.length).trim(); - if (!branch) { - throw new Error('createCommit targetBranch is required'); - } - return branch; - } - if (trimmed.startsWith('refs/')) { - throw new Error('createCommit targetBranch must not include refs/ prefix'); - } - return trimmed; + const trimmed = value.trim(); + if (!trimmed) { + throw new Error('createCommit targetBranch is required'); + } + if (trimmed.startsWith(HEADS_REF_PREFIX)) { + const branch = trimmed.slice(HEADS_REF_PREFIX.length).trim(); + if (!branch) { + throw new Error('createCommit targetBranch is required'); + } + return branch; + } + if (trimmed.startsWith('refs/')) { + throw new Error('createCommit targetBranch must not include refs/ prefix'); + } + return trimmed; } function normalizeLegacyTargetRef(ref: string): string { - const trimmed = ref.trim(); - if (!trimmed) { - throw new Error('createCommit targetRef is required'); - } - if (!trimmed.startsWith(HEADS_REF_PREFIX)) { - throw new Error('createCommit targetRef must start with refs/heads/'); - } - const branch = trimmed.slice(HEADS_REF_PREFIX.length).trim(); - if (!branch) { - throw new Error('createCommit targetRef 
must include a branch name'); - } - return branch; + const trimmed = ref.trim(); + if (!trimmed) { + throw new Error('createCommit targetRef is required'); + } + if (!trimmed.startsWith(HEADS_REF_PREFIX)) { + throw new Error('createCommit targetRef must start with refs/heads/'); + } + const branch = trimmed.slice(HEADS_REF_PREFIX.length).trim(); + if (!branch) { + throw new Error('createCommit targetRef must include a branch name'); + } + return branch; } -function hasLegacyTargetRef(options: CreateCommitOptions): options is LegacyCreateCommitOptions { - return typeof (options as LegacyCreateCommitOptions).targetRef === 'string'; +function hasLegacyTargetRef( + options: CreateCommitOptions +): options is LegacyCreateCommitOptions { + return typeof (options as LegacyCreateCommitOptions).targetRef === 'string'; } export function createCommitBuilder(deps: CommitBuilderDeps): CommitBuilder { - return new CommitBuilderImpl(deps); + return new CommitBuilderImpl(deps); } export function resolveCommitTtlSeconds(options?: { ttl?: number }): number { - if (typeof options?.ttl === 'number' && options.ttl > 0) { - return options.ttl; - } - return DEFAULT_TTL_SECONDS; + if (typeof options?.ttl === 'number' && options.ttl > 0) { + return options.ttl; + } + return DEFAULT_TTL_SECONDS; } diff --git a/packages/git-storage-sdk-node/src/diff-commit.ts b/packages/git-storage-sdk-node/src/diff-commit.ts index 97ed1f54a..d92f691e0 100644 --- a/packages/git-storage-sdk-node/src/diff-commit.ts +++ b/packages/git-storage-sdk-node/src/diff-commit.ts @@ -3,300 +3,313 @@ import { RefUpdateError } from './errors'; import type { CommitPackAckRaw } from './schemas'; import { commitPackAckSchema } from './schemas'; import { - base64Encode, - type ChunkSegment, - chunkify, - requiresDuplex, - toAsyncIterable, - toRequestBody, + type ChunkSegment, + base64Encode, + chunkify, + requiresDuplex, + toAsyncIterable, + toRequestBody, } from './stream-utils'; import type { - CommitResult, - 
CommitSignature, - CreateCommitFromDiffOptions, - DiffSource, + CommitResult, + CommitSignature, + CreateCommitFromDiffOptions, + DiffSource, } from './types'; import { getUserAgent } from './version'; interface DiffCommitMetadataPayload { - target_branch: string; - expected_head_sha?: string; - base_branch?: string; - commit_message: string; - ephemeral?: boolean; - ephemeral_base?: boolean; - author: { - name: string; - email: string; - }; - committer?: { - name: string; - email: string; - }; + target_branch: string; + expected_head_sha?: string; + base_branch?: string; + commit_message: string; + ephemeral?: boolean; + ephemeral_base?: boolean; + author: { + name: string; + email: string; + }; + committer?: { + name: string; + email: string; + }; } interface DiffCommitTransportRequest { - authorization: string; - signal?: AbortSignal; - metadata: DiffCommitMetadataPayload; - diffChunks: AsyncIterable; + authorization: string; + signal?: AbortSignal; + metadata: DiffCommitMetadataPayload; + diffChunks: AsyncIterable; } interface DiffCommitTransport { - send(request: DiffCommitTransportRequest): Promise; + send(request: DiffCommitTransportRequest): Promise; } type NormalizedDiffCommitOptions = { - targetBranch: string; - commitMessage: string; - expectedHeadSha?: string; - baseBranch?: string; - ephemeral?: boolean; - ephemeralBase?: boolean; - author: CommitSignature; - committer?: CommitSignature; - signal?: AbortSignal; - ttl?: number; - initialDiff: DiffSource; + targetBranch: string; + commitMessage: string; + expectedHeadSha?: string; + baseBranch?: string; + ephemeral?: boolean; + ephemeralBase?: boolean; + author: CommitSignature; + committer?: CommitSignature; + signal?: AbortSignal; + ttl?: number; + initialDiff: DiffSource; }; interface CommitFromDiffSendDeps { - options: CreateCommitFromDiffOptions; - getAuthToken: () => Promise; - transport: DiffCommitTransport; + options: CreateCommitFromDiffOptions; + getAuthToken: () => Promise; + transport: 
DiffCommitTransport; } class DiffCommitExecutor { - private readonly options: NormalizedDiffCommitOptions; - private readonly getAuthToken: () => Promise; - private readonly transport: DiffCommitTransport; - private readonly diffFactory: () => AsyncIterable; - private sent = false; - - constructor(deps: CommitFromDiffSendDeps) { - this.options = normalizeDiffCommitOptions(deps.options); - this.getAuthToken = deps.getAuthToken; - this.transport = deps.transport; - - const trimmedMessage = this.options.commitMessage?.trim(); - const trimmedAuthorName = this.options.author?.name?.trim(); - const trimmedAuthorEmail = this.options.author?.email?.trim(); - - if (!trimmedMessage) { - throw new Error('createCommitFromDiff commitMessage is required'); - } - if (!trimmedAuthorName || !trimmedAuthorEmail) { - throw new Error('createCommitFromDiff author name and email are required'); - } - - this.options.commitMessage = trimmedMessage; - this.options.author = { - name: trimmedAuthorName, - email: trimmedAuthorEmail, - }; - - if (typeof this.options.expectedHeadSha === 'string') { - this.options.expectedHeadSha = this.options.expectedHeadSha.trim(); - } - if (typeof this.options.baseBranch === 'string') { - const trimmedBase = this.options.baseBranch.trim(); - if (trimmedBase === '') { - delete this.options.baseBranch; - } else { - if (trimmedBase.startsWith('refs/')) { - throw new Error('createCommitFromDiff baseBranch must not include refs/ prefix'); - } - this.options.baseBranch = trimmedBase; - } - } - if (this.options.ephemeralBase && !this.options.baseBranch) { - throw new Error('createCommitFromDiff ephemeralBase requires baseBranch'); - } - - this.diffFactory = () => toAsyncIterable(this.options.initialDiff); - } - - async send(): Promise { - this.ensureNotSent(); - this.sent = true; - - const metadata = this.buildMetadata(); - const diffIterable = chunkify(this.diffFactory()); - - const authorization = await this.getAuthToken(); - const ack = await 
this.transport.send({ - authorization, - signal: this.options.signal, - metadata, - diffChunks: diffIterable, - }); - - return buildCommitResult(ack); - } - - private buildMetadata(): DiffCommitMetadataPayload { - const metadata: DiffCommitMetadataPayload = { - target_branch: this.options.targetBranch, - commit_message: this.options.commitMessage, - author: { - name: this.options.author.name, - email: this.options.author.email, - }, - }; - - if (this.options.expectedHeadSha) { - metadata.expected_head_sha = this.options.expectedHeadSha; - } - if (this.options.baseBranch) { - metadata.base_branch = this.options.baseBranch; - } - if (this.options.committer) { - metadata.committer = { - name: this.options.committer.name, - email: this.options.committer.email, - }; - } - if (this.options.ephemeral) { - metadata.ephemeral = true; - } - if (this.options.ephemeralBase) { - metadata.ephemeral_base = true; - } - - return metadata; - } - - private ensureNotSent(): void { - if (this.sent) { - throw new Error('createCommitFromDiff cannot be reused after send()'); - } - } + private readonly options: NormalizedDiffCommitOptions; + private readonly getAuthToken: () => Promise; + private readonly transport: DiffCommitTransport; + private readonly diffFactory: () => AsyncIterable; + private sent = false; + + constructor(deps: CommitFromDiffSendDeps) { + this.options = normalizeDiffCommitOptions(deps.options); + this.getAuthToken = deps.getAuthToken; + this.transport = deps.transport; + + const trimmedMessage = this.options.commitMessage?.trim(); + const trimmedAuthorName = this.options.author?.name?.trim(); + const trimmedAuthorEmail = this.options.author?.email?.trim(); + + if (!trimmedMessage) { + throw new Error('createCommitFromDiff commitMessage is required'); + } + if (!trimmedAuthorName || !trimmedAuthorEmail) { + throw new Error( + 'createCommitFromDiff author name and email are required' + ); + } + + this.options.commitMessage = trimmedMessage; + this.options.author = { + 
name: trimmedAuthorName, + email: trimmedAuthorEmail, + }; + + if (typeof this.options.expectedHeadSha === 'string') { + this.options.expectedHeadSha = this.options.expectedHeadSha.trim(); + } + if (typeof this.options.baseBranch === 'string') { + const trimmedBase = this.options.baseBranch.trim(); + if (trimmedBase === '') { + delete this.options.baseBranch; + } else { + if (trimmedBase.startsWith('refs/')) { + throw new Error( + 'createCommitFromDiff baseBranch must not include refs/ prefix' + ); + } + this.options.baseBranch = trimmedBase; + } + } + if (this.options.ephemeralBase && !this.options.baseBranch) { + throw new Error('createCommitFromDiff ephemeralBase requires baseBranch'); + } + + this.diffFactory = () => toAsyncIterable(this.options.initialDiff); + } + + async send(): Promise { + this.ensureNotSent(); + this.sent = true; + + const metadata = this.buildMetadata(); + const diffIterable = chunkify(this.diffFactory()); + + const authorization = await this.getAuthToken(); + const ack = await this.transport.send({ + authorization, + signal: this.options.signal, + metadata, + diffChunks: diffIterable, + }); + + return buildCommitResult(ack); + } + + private buildMetadata(): DiffCommitMetadataPayload { + const metadata: DiffCommitMetadataPayload = { + target_branch: this.options.targetBranch, + commit_message: this.options.commitMessage, + author: { + name: this.options.author.name, + email: this.options.author.email, + }, + }; + + if (this.options.expectedHeadSha) { + metadata.expected_head_sha = this.options.expectedHeadSha; + } + if (this.options.baseBranch) { + metadata.base_branch = this.options.baseBranch; + } + if (this.options.committer) { + metadata.committer = { + name: this.options.committer.name, + email: this.options.committer.email, + }; + } + if (this.options.ephemeral) { + metadata.ephemeral = true; + } + if (this.options.ephemeralBase) { + metadata.ephemeral_base = true; + } + + return metadata; + } + + private ensureNotSent(): void { + if 
(this.sent) { + throw new Error('createCommitFromDiff cannot be reused after send()'); + } + } } export class FetchDiffCommitTransport implements DiffCommitTransport { - private readonly url: string; - - constructor(config: { baseUrl: string; version: number }) { - const trimmedBase = config.baseUrl.replace(/\/+$/, ''); - this.url = `${trimmedBase}/api/v${config.version}/repos/diff-commit`; - } - - async send(request: DiffCommitTransportRequest): Promise { - const bodyIterable = buildMessageIterable(request.metadata, request.diffChunks); - const body = toRequestBody(bodyIterable); - - const init: RequestInit = { - method: 'POST', - headers: { - Authorization: `Bearer ${request.authorization}`, - 'Content-Type': 'application/x-ndjson', - Accept: 'application/json', - 'Code-Storage-Agent': getUserAgent(), - }, - body: body as any, - signal: request.signal, - }; - - if (requiresDuplex(body)) { - (init as RequestInit & { duplex: 'half' }).duplex = 'half'; - } - - const response = await fetch(this.url, init); - if (!response.ok) { - const fallbackMessage = `createCommitFromDiff request failed (${response.status} ${response.statusText})`; - const { statusMessage, statusLabel, refUpdate } = await parseCommitPackError( - response, - fallbackMessage, - ); - throw new RefUpdateError(statusMessage, { - status: statusLabel, - message: statusMessage, - refUpdate, - }); - } - - return commitPackAckSchema.parse(await response.json()); - } + private readonly url: string; + + constructor(config: { baseUrl: string; version: number }) { + const trimmedBase = config.baseUrl.replace(/\/+$/, ''); + this.url = `${trimmedBase}/api/v${config.version}/repos/diff-commit`; + } + + async send(request: DiffCommitTransportRequest): Promise { + const bodyIterable = buildMessageIterable( + request.metadata, + request.diffChunks + ); + const body = toRequestBody(bodyIterable); + + const init: RequestInit = { + method: 'POST', + headers: { + Authorization: `Bearer ${request.authorization}`, + 
'Content-Type': 'application/x-ndjson', + Accept: 'application/json', + 'Code-Storage-Agent': getUserAgent(), + }, + body: body as any, + signal: request.signal, + }; + + if (requiresDuplex(body)) { + (init as RequestInit & { duplex: 'half' }).duplex = 'half'; + } + + const response = await fetch(this.url, init); + if (!response.ok) { + const fallbackMessage = `createCommitFromDiff request failed (${response.status} ${response.statusText})`; + const { statusMessage, statusLabel, refUpdate } = + await parseCommitPackError(response, fallbackMessage); + throw new RefUpdateError(statusMessage, { + status: statusLabel, + message: statusMessage, + refUpdate, + }); + } + + return commitPackAckSchema.parse(await response.json()); + } } function buildMessageIterable( - metadata: DiffCommitMetadataPayload, - diffChunks: AsyncIterable, + metadata: DiffCommitMetadataPayload, + diffChunks: AsyncIterable ): AsyncIterable { - const encoder = new TextEncoder(); - return { - async *[Symbol.asyncIterator]() { - yield encoder.encode(`${JSON.stringify({ metadata })}\n`); - for await (const segment of diffChunks) { - const payload = { - diff_chunk: { - data: base64Encode(segment.chunk), - eof: segment.eof, - }, - }; - yield encoder.encode(`${JSON.stringify(payload)}\n`); - } - }, - }; + const encoder = new TextEncoder(); + return { + async *[Symbol.asyncIterator]() { + yield encoder.encode(`${JSON.stringify({ metadata })}\n`); + for await (const segment of diffChunks) { + const payload = { + diff_chunk: { + data: base64Encode(segment.chunk), + eof: segment.eof, + }, + }; + yield encoder.encode(`${JSON.stringify(payload)}\n`); + } + }, + }; } function normalizeDiffCommitOptions( - options: CreateCommitFromDiffOptions, + options: CreateCommitFromDiffOptions ): NormalizedDiffCommitOptions { - if (!options || typeof options !== 'object') { - throw new Error('createCommitFromDiff options are required'); - } - - if (options.diff === undefined || options.diff === null) { - throw new 
Error('createCommitFromDiff diff is required'); - } - - const targetBranch = normalizeBranchName(options.targetBranch); - - let committer: CommitSignature | undefined; - if (options.committer) { - const name = options.committer.name?.trim(); - const email = options.committer.email?.trim(); - if (!name || !email) { - throw new Error('createCommitFromDiff committer name and email are required when provided'); - } - committer = { name, email }; - } - - return { - targetBranch, - commitMessage: options.commitMessage, - expectedHeadSha: options.expectedHeadSha, - baseBranch: options.baseBranch, - ephemeral: options.ephemeral === true, - ephemeralBase: options.ephemeralBase === true, - author: options.author, - committer, - signal: options.signal, - ttl: options.ttl, - initialDiff: options.diff, - }; + if (!options || typeof options !== 'object') { + throw new Error('createCommitFromDiff options are required'); + } + + if (options.diff === undefined || options.diff === null) { + throw new Error('createCommitFromDiff diff is required'); + } + + const targetBranch = normalizeBranchName(options.targetBranch); + + let committer: CommitSignature | undefined; + if (options.committer) { + const name = options.committer.name?.trim(); + const email = options.committer.email?.trim(); + if (!name || !email) { + throw new Error( + 'createCommitFromDiff committer name and email are required when provided' + ); + } + committer = { name, email }; + } + + return { + targetBranch, + commitMessage: options.commitMessage, + expectedHeadSha: options.expectedHeadSha, + baseBranch: options.baseBranch, + ephemeral: options.ephemeral === true, + ephemeralBase: options.ephemeralBase === true, + author: options.author, + committer, + signal: options.signal, + ttl: options.ttl, + initialDiff: options.diff, + }; } function normalizeBranchName(value: string | undefined): string { - const trimmed = value?.trim(); - if (!trimmed) { - throw new Error('createCommitFromDiff targetBranch is required'); - 
} - if (trimmed.startsWith('refs/heads/')) { - const branch = trimmed.slice('refs/heads/'.length).trim(); - if (!branch) { - throw new Error('createCommitFromDiff targetBranch must include a branch name'); - } - return branch; - } - if (trimmed.startsWith('refs/')) { - throw new Error('createCommitFromDiff targetBranch must not include refs/ prefix'); - } - return trimmed; + const trimmed = value?.trim(); + if (!trimmed) { + throw new Error('createCommitFromDiff targetBranch is required'); + } + if (trimmed.startsWith('refs/heads/')) { + const branch = trimmed.slice('refs/heads/'.length).trim(); + if (!branch) { + throw new Error( + 'createCommitFromDiff targetBranch must include a branch name' + ); + } + return branch; + } + if (trimmed.startsWith('refs/')) { + throw new Error( + 'createCommitFromDiff targetBranch must not include refs/ prefix' + ); + } + return trimmed; } -export async function sendCommitFromDiff(deps: CommitFromDiffSendDeps): Promise { - const executor = new DiffCommitExecutor(deps); - return executor.send(); +export async function sendCommitFromDiff( + deps: CommitFromDiffSendDeps +): Promise { + const executor = new DiffCommitExecutor(deps); + return executor.send(); } diff --git a/packages/git-storage-sdk-node/src/errors.ts b/packages/git-storage-sdk-node/src/errors.ts index d33a69391..5271ab470 100644 --- a/packages/git-storage-sdk-node/src/errors.ts +++ b/packages/git-storage-sdk-node/src/errors.ts @@ -1,50 +1,50 @@ import type { RefUpdate, RefUpdateReason } from './types'; export interface RefUpdateErrorOptions { - status: string; - message?: string; - refUpdate?: Partial; - reason?: RefUpdateReason; + status: string; + message?: string; + refUpdate?: Partial; + reason?: RefUpdateReason; } export class RefUpdateError extends Error { - public readonly status: string; - public readonly reason: RefUpdateReason; - public readonly refUpdate?: Partial; + public readonly status: string; + public readonly reason: RefUpdateReason; + public readonly 
refUpdate?: Partial; - constructor(message: string, options: RefUpdateErrorOptions) { - super(message); - this.name = 'RefUpdateError'; - this.status = options.status; - this.reason = options.reason ?? inferRefUpdateReason(options.status); - this.refUpdate = options.refUpdate; - } + constructor(message: string, options: RefUpdateErrorOptions) { + super(message); + this.name = 'RefUpdateError'; + this.status = options.status; + this.reason = options.reason ?? inferRefUpdateReason(options.status); + this.refUpdate = options.refUpdate; + } } const REF_REASON_MAP: Record = { - precondition_failed: 'precondition_failed', - conflict: 'conflict', - not_found: 'not_found', - invalid: 'invalid', - timeout: 'timeout', - unauthorized: 'unauthorized', - forbidden: 'forbidden', - unavailable: 'unavailable', - internal: 'internal', - failed: 'failed', - ok: 'unknown', + precondition_failed: 'precondition_failed', + conflict: 'conflict', + not_found: 'not_found', + invalid: 'invalid', + timeout: 'timeout', + unauthorized: 'unauthorized', + forbidden: 'forbidden', + unavailable: 'unavailable', + internal: 'internal', + failed: 'failed', + ok: 'unknown', }; export function inferRefUpdateReason(status?: string): RefUpdateReason { - if (!status) { - return 'unknown'; - } + if (!status) { + return 'unknown'; + } - const trimmed = status.trim(); - if (trimmed === '') { - return 'unknown'; - } + const trimmed = status.trim(); + if (trimmed === '') { + return 'unknown'; + } - const label = trimmed.toLowerCase(); - return REF_REASON_MAP[label] ?? 'unknown'; + const label = trimmed.toLowerCase(); + return REF_REASON_MAP[label] ?? 
'unknown'; } diff --git a/packages/git-storage-sdk-node/src/fetch.ts b/packages/git-storage-sdk-node/src/fetch.ts index a9c9ece0f..1ba8ffdd3 100644 --- a/packages/git-storage-sdk-node/src/fetch.ts +++ b/packages/git-storage-sdk-node/src/fetch.ts @@ -3,151 +3,156 @@ import type { ValidAPIVersion, ValidMethod, ValidPath } from './types'; import { getUserAgent } from './version'; interface RequestOptions { - allowedStatus?: number[]; + allowedStatus?: number[]; } export class ApiError extends Error { - public readonly status: number; - public readonly statusText: string; - public readonly method: ValidMethod; - public readonly url: string; - public readonly body?: unknown; - - constructor(params: { - message: string; - status: number; - statusText: string; - method: ValidMethod; - url: string; - body?: unknown; - }) { - super(params.message); - this.name = 'ApiError'; - this.status = params.status; - this.statusText = params.statusText; - this.method = params.method; - this.url = params.url; - this.body = params.body; - } + public readonly status: number; + public readonly statusText: string; + public readonly method: ValidMethod; + public readonly url: string; + public readonly body?: unknown; + + constructor(params: { + message: string; + status: number; + statusText: string; + method: ValidMethod; + url: string; + body?: unknown; + }) { + super(params.message); + this.name = 'ApiError'; + this.status = params.status; + this.statusText = params.statusText; + this.method = params.method; + this.url = params.url; + this.body = params.body; + } } export class ApiFetcher { - constructor( - private readonly API_BASE_URL: string, - private readonly version: ValidAPIVersion, - ) {} - - private getBaseUrl() { - return `${this.API_BASE_URL}/api/v${this.version}`; - } - - private getRequestUrl(path: ValidPath) { - if (typeof path === 'string') { - return `${this.getBaseUrl()}/${path}`; - } else if (path.params) { - const searchParams = new URLSearchParams(); - for (const 
[key, value] of Object.entries(path.params)) { - if (Array.isArray(value)) { - for (const v of value) { - searchParams.append(key, v); - } - } else { - searchParams.append(key, value); - } - } - const paramStr = searchParams.toString(); - return `${this.getBaseUrl()}/${path.path}${paramStr ? `?${paramStr}` : ''}`; - } else { - return `${this.getBaseUrl()}/${path.path}`; - } - } - - private async fetch(path: ValidPath, method: ValidMethod, jwt: string, options?: RequestOptions) { - const requestUrl = this.getRequestUrl(path); - - const requestOptions: RequestInit = { - method, - headers: { - Authorization: `Bearer ${jwt}`, - 'Content-Type': 'application/json', - 'Code-Storage-Agent': getUserAgent(), - }, - }; - - if (method !== 'GET' && typeof path !== 'string' && path.body) { - requestOptions.body = JSON.stringify(path.body); - } - - const response = await fetch(requestUrl, requestOptions); - - if (!response.ok) { - const allowed = options?.allowedStatus ?? []; - if (allowed.includes(response.status)) { - return response; - } - - let errorBody: unknown; - let message: string | undefined; - const contentType = response.headers.get('content-type') ?? ''; - - try { - if (contentType.includes('application/json')) { - errorBody = await response.json(); - } else { - const text = await response.text(); - errorBody = text; - } - } catch { - // Fallback to plain text if JSON parse failed after reading body - try { - errorBody = await response.text(); - } catch { - errorBody = undefined; - } - } - - if (typeof errorBody === 'string') { - const trimmed = errorBody.trim(); - if (trimmed) { - message = trimmed; - } - } else if (errorBody && typeof errorBody === 'object') { - const parsedError = errorEnvelopeSchema.safeParse(errorBody); - if (parsedError.success) { - const trimmed = parsedError.data.error.trim(); - if (trimmed) { - message = trimmed; - } - } - } - - throw new ApiError({ - message: - message ?? 
- `Request ${method} ${requestUrl} failed with status ${response.status} ${response.statusText}`, - status: response.status, - statusText: response.statusText, - method, - url: requestUrl, - body: errorBody, - }); - } - return response; - } - - async get(path: ValidPath, jwt: string, options?: RequestOptions) { - return this.fetch(path, 'GET', jwt, options); - } - - async post(path: ValidPath, jwt: string, options?: RequestOptions) { - return this.fetch(path, 'POST', jwt, options); - } - - async put(path: ValidPath, jwt: string, options?: RequestOptions) { - return this.fetch(path, 'PUT', jwt, options); - } - - async delete(path: ValidPath, jwt: string, options?: RequestOptions) { - return this.fetch(path, 'DELETE', jwt, options); - } + constructor( + private readonly API_BASE_URL: string, + private readonly version: ValidAPIVersion + ) {} + + private getBaseUrl() { + return `${this.API_BASE_URL}/api/v${this.version}`; + } + + private getRequestUrl(path: ValidPath) { + if (typeof path === 'string') { + return `${this.getBaseUrl()}/${path}`; + } else if (path.params) { + const searchParams = new URLSearchParams(); + for (const [key, value] of Object.entries(path.params)) { + if (Array.isArray(value)) { + for (const v of value) { + searchParams.append(key, v); + } + } else { + searchParams.append(key, value); + } + } + const paramStr = searchParams.toString(); + return `${this.getBaseUrl()}/${path.path}${paramStr ? 
`?${paramStr}` : ''}`; + } else { + return `${this.getBaseUrl()}/${path.path}`; + } + } + + private async fetch( + path: ValidPath, + method: ValidMethod, + jwt: string, + options?: RequestOptions + ) { + const requestUrl = this.getRequestUrl(path); + + const requestOptions: RequestInit = { + method, + headers: { + Authorization: `Bearer ${jwt}`, + 'Content-Type': 'application/json', + 'Code-Storage-Agent': getUserAgent(), + }, + }; + + if (method !== 'GET' && typeof path !== 'string' && path.body) { + requestOptions.body = JSON.stringify(path.body); + } + + const response = await fetch(requestUrl, requestOptions); + + if (!response.ok) { + const allowed = options?.allowedStatus ?? []; + if (allowed.includes(response.status)) { + return response; + } + + let errorBody: unknown; + let message: string | undefined; + const contentType = response.headers.get('content-type') ?? ''; + + try { + if (contentType.includes('application/json')) { + errorBody = await response.json(); + } else { + const text = await response.text(); + errorBody = text; + } + } catch { + // Fallback to plain text if JSON parse failed after reading body + try { + errorBody = await response.text(); + } catch { + errorBody = undefined; + } + } + + if (typeof errorBody === 'string') { + const trimmed = errorBody.trim(); + if (trimmed) { + message = trimmed; + } + } else if (errorBody && typeof errorBody === 'object') { + const parsedError = errorEnvelopeSchema.safeParse(errorBody); + if (parsedError.success) { + const trimmed = parsedError.data.error.trim(); + if (trimmed) { + message = trimmed; + } + } + } + + throw new ApiError({ + message: + message ?? 
+ `Request ${method} ${requestUrl} failed with status ${response.status} ${response.statusText}`, + status: response.status, + statusText: response.statusText, + method, + url: requestUrl, + body: errorBody, + }); + } + return response; + } + + async get(path: ValidPath, jwt: string, options?: RequestOptions) { + return this.fetch(path, 'GET', jwt, options); + } + + async post(path: ValidPath, jwt: string, options?: RequestOptions) { + return this.fetch(path, 'POST', jwt, options); + } + + async put(path: ValidPath, jwt: string, options?: RequestOptions) { + return this.fetch(path, 'PUT', jwt, options); + } + + async delete(path: ValidPath, jwt: string, options?: RequestOptions) { + return this.fetch(path, 'DELETE', jwt, options); + } } diff --git a/packages/git-storage-sdk-node/src/index.ts b/packages/git-storage-sdk-node/src/index.ts index 070c5e36c..647d8eb80 100644 --- a/packages/git-storage-sdk-node/src/index.ts +++ b/packages/git-storage-sdk-node/src/index.ts @@ -3,86 +3,90 @@ * * A TypeScript SDK for interacting with Pierre's git storage system */ - -import { importPKCS8, SignJWT } from 'jose'; +import { SignJWT, importPKCS8 } from 'jose'; import snakecaseKeys from 'snakecase-keys'; -import { createCommitBuilder, FetchCommitTransport, resolveCommitTtlSeconds } from './commit'; + +import { + FetchCommitTransport, + createCommitBuilder, + resolveCommitTtlSeconds, +} from './commit'; import { FetchDiffCommitTransport, sendCommitFromDiff } from './diff-commit'; import { RefUpdateError } from './errors'; import { ApiError, ApiFetcher } from './fetch'; import type { RestoreCommitAckRaw } from './schemas'; import { - branchDiffResponseSchema, - commitDiffResponseSchema, - createBranchResponseSchema, - errorEnvelopeSchema, - grepResponseSchema, - listBranchesResponseSchema, - listCommitsResponseSchema, - listFilesResponseSchema, - listReposResponseSchema, - noteReadResponseSchema, - noteWriteResponseSchema, - restoreCommitAckSchema, - restoreCommitResponseSchema, + 
branchDiffResponseSchema, + commitDiffResponseSchema, + createBranchResponseSchema, + errorEnvelopeSchema, + grepResponseSchema, + listBranchesResponseSchema, + listCommitsResponseSchema, + listFilesResponseSchema, + listReposResponseSchema, + noteReadResponseSchema, + noteWriteResponseSchema, + restoreCommitAckSchema, + restoreCommitResponseSchema, } from './schemas'; import type { - AppendNoteOptions, - BranchInfo, - CommitBuilder, - CommitInfo, - CommitResult, - CreateBranchOptions, - CreateBranchResponse, - CreateBranchResult, - CreateCommitFromDiffOptions, - CreateCommitOptions, - CreateNoteOptions, - CreateRepoOptions, - DeleteNoteOptions, - DeleteRepoOptions, - DeleteRepoResult, - DiffFileState, - FileDiff, - FilteredFile, - FindOneOptions, - GetBranchDiffOptions, - GetBranchDiffResponse, - GetBranchDiffResult, - GetCommitDiffOptions, - GetCommitDiffResponse, - GetCommitDiffResult, - GetFileOptions, - GetNoteOptions, - GetNoteResult, - GetRemoteURLOptions, - GitStorageOptions, - GrepFileMatch, - GrepLine, - GrepOptions, - GrepResult, - ListBranchesOptions, - ListBranchesResponse, - ListBranchesResult, - ListCommitsOptions, - ListCommitsResponse, - ListCommitsResult, - ListFilesOptions, - ListFilesResult, - ListReposOptions, - ListReposResponse, - ListReposResult, - NoteWriteResult, - PullUpstreamOptions, - RawBranchInfo, - RawCommitInfo, - RawFileDiff, - RawFilteredFile, - RefUpdate, - Repo, - RestoreCommitOptions, - RestoreCommitResult, - ValidAPIVersion, + AppendNoteOptions, + BranchInfo, + CommitBuilder, + CommitInfo, + CommitResult, + CreateBranchOptions, + CreateBranchResponse, + CreateBranchResult, + CreateCommitFromDiffOptions, + CreateCommitOptions, + CreateNoteOptions, + CreateRepoOptions, + DeleteNoteOptions, + DeleteRepoOptions, + DeleteRepoResult, + DiffFileState, + FileDiff, + FilteredFile, + FindOneOptions, + GetBranchDiffOptions, + GetBranchDiffResponse, + GetBranchDiffResult, + GetCommitDiffOptions, + GetCommitDiffResponse, + 
GetCommitDiffResult, + GetFileOptions, + GetNoteOptions, + GetNoteResult, + GetRemoteURLOptions, + GitStorageOptions, + GrepFileMatch, + GrepLine, + GrepOptions, + GrepResult, + ListBranchesOptions, + ListBranchesResponse, + ListBranchesResult, + ListCommitsOptions, + ListCommitsResponse, + ListCommitsResult, + ListFilesOptions, + ListFilesResult, + ListReposOptions, + ListReposResponse, + ListReposResult, + NoteWriteResult, + PullUpstreamOptions, + RawBranchInfo, + RawCommitInfo, + RawFileDiff, + RawFilteredFile, + RefUpdate, + Repo, + RestoreCommitOptions, + RestoreCommitResult, + ValidAPIVersion, } from './types'; /** @@ -95,7 +99,11 @@ export { ApiError } from './fetch'; export * from './types'; // Export webhook validation utilities -export { parseSignatureHeader, validateWebhook, validateWebhookSignature } from './webhook'; +export { + parseSignatureHeader, + validateWebhook, + validateWebhookSignature, +} from './webhook'; /** * Git Storage API @@ -111,1277 +119,1387 @@ const API_VERSION: ValidAPIVersion = 1; const apiInstanceMap = new Map(); const DEFAULT_TOKEN_TTL_SECONDS = 60 * 60; // 1 hour const RESTORE_COMMIT_ALLOWED_STATUS = [ - 400, // Bad Request - validation errors - 401, // Unauthorized - missing/invalid auth header - 403, // Forbidden - missing git:write scope - 404, // Not Found - repo lookup failures - 408, // Request Timeout - client cancelled - 409, // Conflict - concurrent ref updates - 412, // Precondition Failed - optimistic concurrency - 422, // Unprocessable Entity - metadata issues - 429, // Too Many Requests - upstream throttling - 499, // Client Closed Request - storage cancellation - 500, // Internal Server Error - generic failure - 502, // Bad Gateway - storage/gateway bridge issues - 503, // Service Unavailable - storage selection failures - 504, // Gateway Timeout - long-running storage operations + 400, // Bad Request - validation errors + 401, // Unauthorized - missing/invalid auth header + 403, // Forbidden - missing git:write 
scope + 404, // Not Found - repo lookup failures + 408, // Request Timeout - client cancelled + 409, // Conflict - concurrent ref updates + 412, // Precondition Failed - optimistic concurrency + 422, // Unprocessable Entity - metadata issues + 429, // Too Many Requests - upstream throttling + 499, // Client Closed Request - storage cancellation + 500, // Internal Server Error - generic failure + 502, // Bad Gateway - storage/gateway bridge issues + 503, // Service Unavailable - storage selection failures + 504, // Gateway Timeout - long-running storage operations ] as const; const NOTE_WRITE_ALLOWED_STATUS = [ - 400, // Bad Request - validation errors - 401, // Unauthorized - missing/invalid auth header - 403, // Forbidden - missing git:write scope - 404, // Not Found - repo or note lookup failures - 408, // Request Timeout - client cancelled - 409, // Conflict - concurrent ref updates - 412, // Precondition Failed - optimistic concurrency - 422, // Unprocessable Entity - metadata issues - 429, // Too Many Requests - upstream throttling - 499, // Client Closed Request - storage cancellation - 500, // Internal Server Error - generic failure - 502, // Bad Gateway - storage/gateway bridge issues - 503, // Service Unavailable - storage selection failures - 504, // Gateway Timeout - long-running storage operations + 400, // Bad Request - validation errors + 401, // Unauthorized - missing/invalid auth header + 403, // Forbidden - missing git:write scope + 404, // Not Found - repo or note lookup failures + 408, // Request Timeout - client cancelled + 409, // Conflict - concurrent ref updates + 412, // Precondition Failed - optimistic concurrency + 422, // Unprocessable Entity - metadata issues + 429, // Too Many Requests - upstream throttling + 499, // Client Closed Request - storage cancellation + 500, // Internal Server Error - generic failure + 502, // Bad Gateway - storage/gateway bridge issues + 503, // Service Unavailable - storage selection failures + 504, // 
Gateway Timeout - long-running storage operations ] as const; function resolveInvocationTtlSeconds( - options?: { ttl?: number }, - defaultValue: number = DEFAULT_TOKEN_TTL_SECONDS, + options?: { ttl?: number }, + defaultValue: number = DEFAULT_TOKEN_TTL_SECONDS ): number { - if (typeof options?.ttl === 'number' && options.ttl > 0) { - return options.ttl; - } - return defaultValue; + if (typeof options?.ttl === 'number' && options.ttl > 0) { + return options.ttl; + } + return defaultValue; } type RestoreCommitAck = RestoreCommitAckRaw; function toRefUpdate(result: RestoreCommitAck['result']): RefUpdate { - return { - branch: result.branch, - oldSha: result.old_sha, - newSha: result.new_sha, - }; + return { + branch: result.branch, + oldSha: result.old_sha, + newSha: result.new_sha, + }; } function buildRestoreCommitResult(ack: RestoreCommitAck): RestoreCommitResult { - const refUpdate = toRefUpdate(ack.result); - if (!ack.result.success) { - throw new RefUpdateError( - ack.result.message ?? `Restore commit failed with status ${ack.result.status}`, - { - status: ack.result.status, - message: ack.result.message, - refUpdate, - }, - ); - } - return { - commitSha: ack.commit.commit_sha, - treeSha: ack.commit.tree_sha, - targetBranch: ack.commit.target_branch, - packBytes: ack.commit.pack_bytes, - refUpdate, - }; + const refUpdate = toRefUpdate(ack.result); + if (!ack.result.success) { + throw new RefUpdateError( + ack.result.message ?? 
+ `Restore commit failed with status ${ack.result.status}`, + { + status: ack.result.status, + message: ack.result.message, + refUpdate, + } + ); + } + return { + commitSha: ack.commit.commit_sha, + treeSha: ack.commit.tree_sha, + targetBranch: ack.commit.target_branch, + packBytes: ack.commit.pack_bytes, + refUpdate, + }; } interface RestoreCommitFailureInfo { - status?: string; - message?: string; - refUpdate?: Partial; + status?: string; + message?: string; + refUpdate?: Partial; } function toPartialRefUpdate( - branch?: unknown, - oldSha?: unknown, - newSha?: unknown, + branch?: unknown, + oldSha?: unknown, + newSha?: unknown ): Partial | undefined { - const refUpdate: Partial = {}; - if (typeof branch === 'string' && branch.trim() !== '') { - refUpdate.branch = branch; - } - if (typeof oldSha === 'string' && oldSha.trim() !== '') { - refUpdate.oldSha = oldSha; - } - if (typeof newSha === 'string' && newSha.trim() !== '') { - refUpdate.newSha = newSha; - } - return Object.keys(refUpdate).length > 0 ? refUpdate : undefined; + const refUpdate: Partial = {}; + if (typeof branch === 'string' && branch.trim() !== '') { + refUpdate.branch = branch; + } + if (typeof oldSha === 'string' && oldSha.trim() !== '') { + refUpdate.oldSha = oldSha; + } + if (typeof newSha === 'string' && newSha.trim() !== '') { + refUpdate.newSha = newSha; + } + return Object.keys(refUpdate).length > 0 ? 
refUpdate : undefined; } function parseRestoreCommitPayload( - payload: unknown, + payload: unknown ): { ack: RestoreCommitAck } | { failure: RestoreCommitFailureInfo } | null { - const ack = restoreCommitAckSchema.safeParse(payload); - if (ack.success) { - return { ack: ack.data }; - } - - const failure = restoreCommitResponseSchema.safeParse(payload); - if (failure.success) { - const result = failure.data.result; - return { - failure: { - status: result.status, - message: result.message, - refUpdate: toPartialRefUpdate(result.branch, result.old_sha, result.new_sha), - }, - }; - } - - return null; + const ack = restoreCommitAckSchema.safeParse(payload); + if (ack.success) { + return { ack: ack.data }; + } + + const failure = restoreCommitResponseSchema.safeParse(payload); + if (failure.success) { + const result = failure.data.result; + return { + failure: { + status: result.status, + message: result.message, + refUpdate: toPartialRefUpdate( + result.branch, + result.old_sha, + result.new_sha + ), + }, + }; + } + + return null; } function httpStatusToRestoreStatus(status: number): string { - switch (status) { - case 409: - return 'conflict'; - case 412: - return 'precondition_failed'; - default: - return `${status}`; - } + switch (status) { + case 409: + return 'conflict'; + case 412: + return 'precondition_failed'; + default: + return `${status}`; + } } function getApiInstance(baseUrl: string, version: ValidAPIVersion) { - if (!apiInstanceMap.has(`${baseUrl}--${version}`)) { - apiInstanceMap.set(`${baseUrl}--${version}`, new ApiFetcher(baseUrl, version)); - } - return apiInstanceMap.get(`${baseUrl}--${version}`)!; + if (!apiInstanceMap.has(`${baseUrl}--${version}`)) { + apiInstanceMap.set( + `${baseUrl}--${version}`, + new ApiFetcher(baseUrl, version) + ); + } + return apiInstanceMap.get(`${baseUrl}--${version}`)!; } function transformBranchInfo(raw: RawBranchInfo): BranchInfo { - return { - cursor: raw.cursor, - name: raw.name, - headSha: raw.head_sha, - 
createdAt: raw.created_at, - }; + return { + cursor: raw.cursor, + name: raw.name, + headSha: raw.head_sha, + createdAt: raw.created_at, + }; } -function transformListBranchesResult(raw: ListBranchesResponse): ListBranchesResult { - return { - branches: raw.branches.map(transformBranchInfo), - nextCursor: raw.next_cursor ?? undefined, - hasMore: raw.has_more, - }; +function transformListBranchesResult( + raw: ListBranchesResponse +): ListBranchesResult { + return { + branches: raw.branches.map(transformBranchInfo), + nextCursor: raw.next_cursor ?? undefined, + hasMore: raw.has_more, + }; } function transformCommitInfo(raw: RawCommitInfo): CommitInfo { - const parsedDate = new Date(raw.date); - return { - sha: raw.sha, - message: raw.message, - authorName: raw.author_name, - authorEmail: raw.author_email, - committerName: raw.committer_name, - committerEmail: raw.committer_email, - date: parsedDate, - rawDate: raw.date, - }; + const parsedDate = new Date(raw.date); + return { + sha: raw.sha, + message: raw.message, + authorName: raw.author_name, + authorEmail: raw.author_email, + committerName: raw.committer_name, + committerEmail: raw.committer_email, + date: parsedDate, + rawDate: raw.date, + }; } -function transformListCommitsResult(raw: ListCommitsResponse): ListCommitsResult { - return { - commits: raw.commits.map(transformCommitInfo), - nextCursor: raw.next_cursor ?? undefined, - hasMore: raw.has_more, - }; +function transformListCommitsResult( + raw: ListCommitsResponse +): ListCommitsResult { + return { + commits: raw.commits.map(transformCommitInfo), + nextCursor: raw.next_cursor ?? 
undefined, + hasMore: raw.has_more, + }; } function normalizeDiffState(rawState: string): DiffFileState { - if (!rawState) { - return 'unknown'; - } - const leading = rawState.trim()[0]?.toUpperCase(); - switch (leading) { - case 'A': - return 'added'; - case 'M': - return 'modified'; - case 'D': - return 'deleted'; - case 'R': - return 'renamed'; - case 'C': - return 'copied'; - case 'T': - return 'type_changed'; - case 'U': - return 'unmerged'; - default: - return 'unknown'; - } + if (!rawState) { + return 'unknown'; + } + const leading = rawState.trim()[0]?.toUpperCase(); + switch (leading) { + case 'A': + return 'added'; + case 'M': + return 'modified'; + case 'D': + return 'deleted'; + case 'R': + return 'renamed'; + case 'C': + return 'copied'; + case 'T': + return 'type_changed'; + case 'U': + return 'unmerged'; + default: + return 'unknown'; + } } function transformFileDiff(raw: RawFileDiff): FileDiff { - const normalizedState = normalizeDiffState(raw.state); - return { - path: raw.path, - state: normalizedState, - rawState: raw.state, - oldPath: raw.old_path ?? undefined, - raw: raw.raw, - bytes: raw.bytes, - isEof: raw.is_eof, - }; + const normalizedState = normalizeDiffState(raw.state); + return { + path: raw.path, + state: normalizedState, + rawState: raw.state, + oldPath: raw.old_path ?? undefined, + raw: raw.raw, + bytes: raw.bytes, + isEof: raw.is_eof, + }; } function transformFilteredFile(raw: RawFilteredFile): FilteredFile { - const normalizedState = normalizeDiffState(raw.state); - return { - path: raw.path, - state: normalizedState, - rawState: raw.state, - oldPath: raw.old_path ?? undefined, - bytes: raw.bytes, - isEof: raw.is_eof, - }; + const normalizedState = normalizeDiffState(raw.state); + return { + path: raw.path, + state: normalizedState, + rawState: raw.state, + oldPath: raw.old_path ?? 
undefined, + bytes: raw.bytes, + isEof: raw.is_eof, + }; } -function transformBranchDiffResult(raw: GetBranchDiffResponse): GetBranchDiffResult { - return { - branch: raw.branch, - base: raw.base, - stats: raw.stats, - files: raw.files.map(transformFileDiff), - filteredFiles: raw.filtered_files.map(transformFilteredFile), - }; +function transformBranchDiffResult( + raw: GetBranchDiffResponse +): GetBranchDiffResult { + return { + branch: raw.branch, + base: raw.base, + stats: raw.stats, + files: raw.files.map(transformFileDiff), + filteredFiles: raw.filtered_files.map(transformFilteredFile), + }; } -function transformCommitDiffResult(raw: GetCommitDiffResponse): GetCommitDiffResult { - return { - sha: raw.sha, - stats: raw.stats, - files: raw.files.map(transformFileDiff), - filteredFiles: raw.filtered_files.map(transformFilteredFile), - }; +function transformCommitDiffResult( + raw: GetCommitDiffResponse +): GetCommitDiffResult { + return { + sha: raw.sha, + stats: raw.stats, + files: raw.files.map(transformFileDiff), + filteredFiles: raw.filtered_files.map(transformFilteredFile), + }; } -function transformCreateBranchResult(raw: CreateBranchResponse): CreateBranchResult { - return { - message: raw.message, - targetBranch: raw.target_branch, - targetIsEphemeral: raw.target_is_ephemeral, - commitSha: raw.commit_sha ?? undefined, - }; +function transformCreateBranchResult( + raw: CreateBranchResponse +): CreateBranchResult { + return { + message: raw.message, + targetBranch: raw.target_branch, + targetIsEphemeral: raw.target_is_ephemeral, + commitSha: raw.commit_sha ?? undefined, + }; } function transformListReposResult(raw: ListReposResponse): ListReposResult { - return { - repos: raw.repos.map((repo) => ({ - repoId: repo.repo_id, - url: repo.url, - defaultBranch: repo.default_branch, - createdAt: repo.created_at, - baseRepo: repo.base_repo - ? 
{ - provider: repo.base_repo.provider, - owner: repo.base_repo.owner, - name: repo.base_repo.name, - } - : undefined, - })), - nextCursor: raw.next_cursor ?? undefined, - hasMore: raw.has_more, - }; + return { + repos: raw.repos.map((repo) => ({ + repoId: repo.repo_id, + url: repo.url, + defaultBranch: repo.default_branch, + createdAt: repo.created_at, + baseRepo: repo.base_repo + ? { + provider: repo.base_repo.provider, + owner: repo.base_repo.owner, + name: repo.base_repo.name, + } + : undefined, + })), + nextCursor: raw.next_cursor ?? undefined, + hasMore: raw.has_more, + }; } -function transformGrepLine(raw: { line_number: number; text: string; type: string }): GrepLine { - return { - lineNumber: raw.line_number, - text: raw.text, - type: raw.type, - }; +function transformGrepLine(raw: { + line_number: number; + text: string; + type: string; +}): GrepLine { + return { + lineNumber: raw.line_number, + text: raw.text, + type: raw.type, + }; } function transformGrepFileMatch(raw: { - path: string; - lines: { line_number: number; text: string; type: string }[]; + path: string; + lines: { line_number: number; text: string; type: string }[]; }): GrepFileMatch { - return { - path: raw.path, - lines: raw.lines.map(transformGrepLine), - }; + return { + path: raw.path, + lines: raw.lines.map(transformGrepLine), + }; } function transformNoteReadResult(raw: { - sha: string; - note: string; - ref_sha: string; + sha: string; + note: string; + ref_sha: string; }): GetNoteResult { - return { - sha: raw.sha, - note: raw.note, - refSha: raw.ref_sha, - }; + return { + sha: raw.sha, + note: raw.note, + refSha: raw.ref_sha, + }; } function transformNoteWriteResult(raw: { - sha: string; - target_ref: string; - base_commit?: string; - new_ref_sha: string; - result: { success: boolean; status: string; message?: string }; + sha: string; + target_ref: string; + base_commit?: string; + new_ref_sha: string; + result: { success: boolean; status: string; message?: string }; }): 
NoteWriteResult { - return { - sha: raw.sha, - targetRef: raw.target_ref, - baseCommit: raw.base_commit, - newRefSha: raw.new_ref_sha, - result: { - success: raw.result.success, - status: raw.result.status, - message: raw.result.message, - }, - }; + return { + sha: raw.sha, + targetRef: raw.target_ref, + baseCommit: raw.base_commit, + newRefSha: raw.new_ref_sha, + result: { + success: raw.result.success, + status: raw.result.status, + message: raw.result.message, + }, + }; } function buildNoteWriteBody( - sha: string, - note: string, - action: 'add' | 'append', - options: { expectedRefSha?: string; author?: { name: string; email: string } }, + sha: string, + note: string, + action: 'add' | 'append', + options: { expectedRefSha?: string; author?: { name: string; email: string } } ): Record { - const body: Record = { - sha, - action, - note, - }; - - const expectedRefSha = options.expectedRefSha?.trim(); - if (expectedRefSha) { - body.expected_ref_sha = expectedRefSha; - } - - if (options.author) { - const authorName = options.author.name?.trim(); - const authorEmail = options.author.email?.trim(); - if (!authorName || !authorEmail) { - throw new Error('note author name and email are required when provided'); - } - body.author = { - name: authorName, - email: authorEmail, - }; - } - - return body; + const body: Record = { + sha, + action, + note, + }; + + const expectedRefSha = options.expectedRefSha?.trim(); + if (expectedRefSha) { + body.expected_ref_sha = expectedRefSha; + } + + if (options.author) { + const authorName = options.author.name?.trim(); + const authorEmail = options.author.email?.trim(); + if (!authorName || !authorEmail) { + throw new Error('note author name and email are required when provided'); + } + body.author = { + name: authorName, + email: authorEmail, + }; + } + + return body; } async function parseNoteWriteResponse( - response: Response, - method: 'POST' | 'DELETE', + response: Response, + method: 'POST' | 'DELETE' ): Promise { - let 
jsonBody: unknown; - const contentType = response.headers.get('content-type') ?? ''; - try { - if (contentType.includes('application/json')) { - jsonBody = await response.json(); - } else { - jsonBody = await response.text(); - } - } catch { - jsonBody = undefined; - } - - if (jsonBody && typeof jsonBody === 'object') { - const parsed = noteWriteResponseSchema.safeParse(jsonBody); - if (parsed.success) { - return transformNoteWriteResult(parsed.data); - } - const parsedError = errorEnvelopeSchema.safeParse(jsonBody); - if (parsedError.success) { - throw new ApiError({ - message: parsedError.data.error, - status: response.status, - statusText: response.statusText, - method, - url: response.url, - body: jsonBody, - }); - } - } - - const fallbackMessage = - typeof jsonBody === 'string' && jsonBody.trim() !== '' - ? jsonBody.trim() - : `Request ${method} ${response.url} failed with status ${response.status} ${response.statusText}`; - - throw new ApiError({ - message: fallbackMessage, - status: response.status, - statusText: response.statusText, - method, - url: response.url, - body: jsonBody, - }); + let jsonBody: unknown; + const contentType = response.headers.get('content-type') ?? ''; + try { + if (contentType.includes('application/json')) { + jsonBody = await response.json(); + } else { + jsonBody = await response.text(); + } + } catch { + jsonBody = undefined; + } + + if (jsonBody && typeof jsonBody === 'object') { + const parsed = noteWriteResponseSchema.safeParse(jsonBody); + if (parsed.success) { + return transformNoteWriteResult(parsed.data); + } + const parsedError = errorEnvelopeSchema.safeParse(jsonBody); + if (parsedError.success) { + throw new ApiError({ + message: parsedError.data.error, + status: response.status, + statusText: response.statusText, + method, + url: response.url, + body: jsonBody, + }); + } + } + + const fallbackMessage = + typeof jsonBody === 'string' && jsonBody.trim() !== '' + ? 
jsonBody.trim() + : `Request ${method} ${response.url} failed with status ${response.status} ${response.statusText}`; + + throw new ApiError({ + message: fallbackMessage, + status: response.status, + statusText: response.statusText, + method, + url: response.url, + body: jsonBody, + }); } /** * Implementation of the Repo interface */ class RepoImpl implements Repo { - private readonly api: ApiFetcher; - - constructor( - public readonly id: string, - public readonly defaultBranch: string, - private readonly options: GitStorageOptions, - private readonly generateJWT: ( - repoId: string, - options?: GetRemoteURLOptions, - ) => Promise, - ) { - this.api = getApiInstance( - this.options.apiBaseUrl ?? GitStorage.getDefaultAPIBaseUrl(options.name), - this.options.apiVersion ?? API_VERSION, - ); - } - - async getRemoteURL(urlOptions?: GetRemoteURLOptions): Promise { - const url = new URL(`https://${this.options.storageBaseUrl}/${this.id}.git`); - url.username = `t`; - url.password = await this.generateJWT(this.id, urlOptions); - return url.toString(); - } - - async getEphemeralRemoteURL(urlOptions?: GetRemoteURLOptions): Promise { - const url = new URL(`https://${this.options.storageBaseUrl}/${this.id}+ephemeral.git`); - url.username = `t`; - url.password = await this.generateJWT(this.id, urlOptions); - return url.toString(); - } - - async getFileStream(options: GetFileOptions): Promise { - const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); - const jwt = await this.generateJWT(this.id, { - permissions: ['git:read'], - ttl, - }); - - const params: Record = { - path: options.path, - }; - - if (options.ref) { - params.ref = options.ref; - } - if (typeof options.ephemeral === 'boolean') { - params.ephemeral = String(options.ephemeral); - } - if (typeof options.ephemeralBase === 'boolean') { - params.ephemeral_base = String(options.ephemeralBase); - } - - // Return the raw fetch Response for streaming - return this.api.get({ path: 'repos/file', params 
}, jwt); - } - - async listFiles(options?: ListFilesOptions): Promise { - const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); - const jwt = await this.generateJWT(this.id, { - permissions: ['git:read'], - ttl, - }); - - const params: Record = {}; - if (options?.ref) { - params.ref = options.ref; - } - if (typeof options?.ephemeral === 'boolean') { - params.ephemeral = String(options.ephemeral); - } - const response = await this.api.get( - { path: 'repos/files', params: Object.keys(params).length ? params : undefined }, - jwt, - ); - - const raw = listFilesResponseSchema.parse(await response.json()); - return { paths: raw.paths, ref: raw.ref }; - } - - async listBranches(options?: ListBranchesOptions): Promise { - const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); - const jwt = await this.generateJWT(this.id, { - permissions: ['git:read'], - ttl, - }); - - const cursor = options?.cursor; - const limit = options?.limit; - - let params: Record | undefined; - - if (typeof cursor === 'string' || typeof limit === 'number') { - params = {}; - if (typeof cursor === 'string') { - params.cursor = cursor; - } - if (typeof limit === 'number') { - params.limit = limit.toString(); - } - } - - const response = await this.api.get({ path: 'repos/branches', params }, jwt); - - const raw = listBranchesResponseSchema.parse(await response.json()); - return transformListBranchesResult({ - ...raw, - next_cursor: raw.next_cursor ?? 
undefined, - }); - } - - async listCommits(options?: ListCommitsOptions): Promise { - const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); - const jwt = await this.generateJWT(this.id, { - permissions: ['git:read'], - ttl, - }); - - let params: Record | undefined; - - if (options?.branch || options?.cursor || options?.limit) { - params = {}; - if (options?.branch) { - params.branch = options.branch; - } - if (options?.cursor) { - params.cursor = options.cursor; - } - if (typeof options?.limit == 'number') { - params.limit = options.limit.toString(); - } - } - - const response = await this.api.get({ path: 'repos/commits', params }, jwt); - - const raw = listCommitsResponseSchema.parse(await response.json()); - return transformListCommitsResult({ - ...raw, - next_cursor: raw.next_cursor ?? undefined, - }); - } - - async getNote(options: GetNoteOptions): Promise { - const sha = options?.sha?.trim(); - if (!sha) { - throw new Error('getNote sha is required'); - } - - const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); - const jwt = await this.generateJWT(this.id, { - permissions: ['git:read'], - ttl, - }); - - const response = await this.api.get({ path: 'repos/notes', params: { sha } }, jwt); - const raw = noteReadResponseSchema.parse(await response.json()); - return transformNoteReadResult(raw); - } - - async createNote(options: CreateNoteOptions): Promise { - const sha = options?.sha?.trim(); - if (!sha) { - throw new Error('createNote sha is required'); - } - - const note = options?.note?.trim(); - if (!note) { - throw new Error('createNote note is required'); - } - - const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); - const jwt = await this.generateJWT(this.id, { - permissions: ['git:write'], - ttl, - }); - - const body = buildNoteWriteBody(sha, note, 'add', { - expectedRefSha: options.expectedRefSha, - author: options.author, - }); - - const response = await this.api.post({ path: 
'repos/notes', body }, jwt, { - allowedStatus: [...NOTE_WRITE_ALLOWED_STATUS], - }); - - const result = await parseNoteWriteResponse(response, 'POST'); - if (!result.result.success) { - throw new RefUpdateError( - result.result.message ?? `createNote failed with status ${result.result.status}`, - { - status: result.result.status, - message: result.result.message, - refUpdate: toPartialRefUpdate(result.targetRef, result.baseCommit, result.newRefSha), - }, - ); - } - return result; - } - - async appendNote(options: AppendNoteOptions): Promise { - const sha = options?.sha?.trim(); - if (!sha) { - throw new Error('appendNote sha is required'); - } - - const note = options?.note?.trim(); - if (!note) { - throw new Error('appendNote note is required'); - } - - const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); - const jwt = await this.generateJWT(this.id, { - permissions: ['git:write'], - ttl, - }); - - const body = buildNoteWriteBody(sha, note, 'append', { - expectedRefSha: options.expectedRefSha, - author: options.author, - }); - - const response = await this.api.post({ path: 'repos/notes', body }, jwt, { - allowedStatus: [...NOTE_WRITE_ALLOWED_STATUS], - }); - - const result = await parseNoteWriteResponse(response, 'POST'); - if (!result.result.success) { - throw new RefUpdateError( - result.result.message ?? 
`appendNote failed with status ${result.result.status}`, - { - status: result.result.status, - message: result.result.message, - refUpdate: toPartialRefUpdate(result.targetRef, result.baseCommit, result.newRefSha), - }, - ); - } - return result; - } - - async deleteNote(options: DeleteNoteOptions): Promise { - const sha = options?.sha?.trim(); - if (!sha) { - throw new Error('deleteNote sha is required'); - } - - const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); - const jwt = await this.generateJWT(this.id, { - permissions: ['git:write'], - ttl, - }); - - const body: Record = { - sha, - }; - - const expectedRefSha = options.expectedRefSha?.trim(); - if (expectedRefSha) { - body.expected_ref_sha = expectedRefSha; - } - - if (options.author) { - const authorName = options.author.name?.trim(); - const authorEmail = options.author.email?.trim(); - if (!authorName || !authorEmail) { - throw new Error('deleteNote author name and email are required when provided'); - } - body.author = { - name: authorName, - email: authorEmail, - }; - } - - const response = await this.api.delete({ path: 'repos/notes', body }, jwt, { - allowedStatus: [...NOTE_WRITE_ALLOWED_STATUS], - }); - - const result = await parseNoteWriteResponse(response, 'DELETE'); - if (!result.result.success) { - throw new RefUpdateError( - result.result.message ?? 
`deleteNote failed with status ${result.result.status}`, - { - status: result.result.status, - message: result.result.message, - refUpdate: toPartialRefUpdate(result.targetRef, result.baseCommit, result.newRefSha), - }, - ); - } - return result; - } - - async getBranchDiff(options: GetBranchDiffOptions): Promise { - const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); - const jwt = await this.generateJWT(this.id, { - permissions: ['git:read'], - ttl, - }); - - const params: Record = { - branch: options.branch, - }; - - if (options.base) { - params.base = options.base; - } - if (typeof options.ephemeral === 'boolean') { - params.ephemeral = String(options.ephemeral); - } - if (typeof options.ephemeralBase === 'boolean') { - params.ephemeral_base = String(options.ephemeralBase); - } - if (options.paths && options.paths.length > 0) { - params.path = options.paths; - } - - const response = await this.api.get({ path: 'repos/branches/diff', params }, jwt); - - const raw = branchDiffResponseSchema.parse(await response.json()); - return transformBranchDiffResult(raw); - } - - async getCommitDiff(options: GetCommitDiffOptions): Promise { - const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); - const jwt = await this.generateJWT(this.id, { - permissions: ['git:read'], - ttl, - }); - - const params: Record = { - sha: options.sha, - }; - - if (options.baseSha) { - params.baseSha = options.baseSha; - } - if (options.paths && options.paths.length > 0) { - params.path = options.paths; - } - - const response = await this.api.get({ path: 'repos/diff', params }, jwt); - - const raw = commitDiffResponseSchema.parse(await response.json()); - return transformCommitDiffResult(raw); - } - - async grep(options: GrepOptions): Promise { - const pattern = options?.query?.pattern?.trim(); - if (!pattern) { - throw new Error('grep query.pattern is required'); - } - - const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); - 
const jwt = await this.generateJWT(this.id, { - permissions: ['git:read'], - ttl, - }); - - const body: Record = { - query: { - pattern, - ...(typeof options.query.caseSensitive === 'boolean' - ? { case_sensitive: options.query.caseSensitive } - : {}), - }, - }; - - if (options.ref) { - body.rev = options.ref; - } - if (Array.isArray(options.paths) && options.paths.length > 0) { - body.paths = options.paths; - } - if (options.fileFilters) { - body.file_filters = { - ...(options.fileFilters.includeGlobs - ? { include_globs: options.fileFilters.includeGlobs } - : {}), - ...(options.fileFilters.excludeGlobs - ? { exclude_globs: options.fileFilters.excludeGlobs } - : {}), - ...(options.fileFilters.extensionFilters - ? { extension_filters: options.fileFilters.extensionFilters } - : {}), - }; - } - if (options.context) { - body.context = { - ...(typeof options.context.before === 'number' ? { before: options.context.before } : {}), - ...(typeof options.context.after === 'number' ? { after: options.context.after } : {}), - }; - } - if (options.limits) { - body.limits = { - ...(typeof options.limits.maxLines === 'number' - ? { max_lines: options.limits.maxLines } - : {}), - ...(typeof options.limits.maxMatchesPerFile === 'number' - ? { max_matches_per_file: options.limits.maxMatchesPerFile } - : {}), - }; - } - if (options.pagination) { - body.pagination = { - ...(typeof options.pagination.cursor === 'string' && options.pagination.cursor.trim() !== '' - ? { cursor: options.pagination.cursor } - : {}), - ...(typeof options.pagination.limit === 'number' - ? 
{ limit: options.pagination.limit } - : {}), - }; - } - - const response = await this.api.post({ path: 'repos/grep', body }, jwt); - const raw = grepResponseSchema.parse(await response.json()); - - return { - query: { - pattern: raw.query.pattern, - caseSensitive: raw.query.case_sensitive, - }, - repo: { - ref: raw.repo.ref, - commit: raw.repo.commit, - }, - matches: raw.matches.map(transformGrepFileMatch), - nextCursor: raw.next_cursor ?? undefined, - hasMore: raw.has_more, - }; - } - - async pullUpstream(options: PullUpstreamOptions = {}): Promise { - const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); - const jwt = await this.generateJWT(this.id, { - permissions: ['git:write'], - ttl, - }); - - const body: Record = {}; - - if (options.ref) { - body.ref = options.ref; - } - - const response = await this.api.post({ path: 'repos/pull-upstream', body }, jwt); - - if (response.status !== 202) { - throw new Error(`Pull Upstream failed: ${response.status} ${await response.text()}`); - } - - return; - } - - async createBranch(options: CreateBranchOptions): Promise { - const baseBranch = options?.baseBranch?.trim(); - if (!baseBranch) { - throw new Error('createBranch baseBranch is required'); - } - const targetBranch = options?.targetBranch?.trim(); - if (!targetBranch) { - throw new Error('createBranch targetBranch is required'); - } - - const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); - const jwt = await this.generateJWT(this.id, { - permissions: ['git:write'], - ttl, - }); - - const body: Record = { - base_branch: baseBranch, - target_branch: targetBranch, - }; - - if (options.baseIsEphemeral === true) { - body.base_is_ephemeral = true; - } - if (options.targetIsEphemeral === true) { - body.target_is_ephemeral = true; - } - - const response = await this.api.post({ path: 'repos/branches/create', body }, jwt); - const raw = createBranchResponseSchema.parse(await response.json()); - return 
transformCreateBranchResult(raw); - } - - async restoreCommit(options: RestoreCommitOptions): Promise { - const targetBranch = options?.targetBranch?.trim(); - if (!targetBranch) { - throw new Error('restoreCommit targetBranch is required'); - } - if (targetBranch.startsWith('refs/')) { - throw new Error('restoreCommit targetBranch must not include refs/ prefix'); - } - - const targetCommitSha = options?.targetCommitSha?.trim(); - if (!targetCommitSha) { - throw new Error('restoreCommit targetCommitSha is required'); - } - const commitMessage = options?.commitMessage?.trim(); - - const authorName = options.author?.name?.trim(); - const authorEmail = options.author?.email?.trim(); - if (!authorName || !authorEmail) { - throw new Error('restoreCommit author name and email are required'); - } - - const ttl = resolveCommitTtlSeconds(options); - const jwt = await this.generateJWT(this.id, { - permissions: ['git:write'], - ttl, - }); - - const metadata: Record = { - target_branch: targetBranch, - target_commit_sha: targetCommitSha, - author: { - name: authorName, - email: authorEmail, - }, - }; - - if (commitMessage) { - metadata.commit_message = commitMessage; - } - - const expectedHeadSha = options.expectedHeadSha?.trim(); - if (expectedHeadSha) { - metadata.expected_head_sha = expectedHeadSha; - } - - if (options.committer) { - const committerName = options.committer.name?.trim(); - const committerEmail = options.committer.email?.trim(); - if (!committerName || !committerEmail) { - throw new Error('restoreCommit committer name and email are required when provided'); - } - metadata.committer = { - name: committerName, - email: committerEmail, - }; - } - - const response = await this.api.post( - { path: 'repos/restore-commit', body: { metadata } }, - jwt, - { - allowedStatus: [...RESTORE_COMMIT_ALLOWED_STATUS], - }, - ); - - const payload = await response.json(); - const parsed = parseRestoreCommitPayload(payload); - if (parsed && 'ack' in parsed) { - return 
buildRestoreCommitResult(parsed.ack); - } - - const failure = parsed && 'failure' in parsed ? parsed.failure : undefined; - const status = failure?.status ?? httpStatusToRestoreStatus(response.status); - const message = - failure?.message ?? - `Restore commit failed with HTTP ${response.status}` + - (response.statusText ? ` ${response.statusText}` : ''); - - throw new RefUpdateError(message, { - status, - refUpdate: failure?.refUpdate, - }); - } - - createCommit(options: CreateCommitOptions): CommitBuilder { - const version = this.options.apiVersion ?? API_VERSION; - const baseUrl = this.options.apiBaseUrl ?? GitStorage.getDefaultAPIBaseUrl(this.options.name); - const transport = new FetchCommitTransport({ baseUrl, version }); - const ttl = resolveCommitTtlSeconds(options); - const builderOptions: CreateCommitOptions = { - ...options, - ttl, - }; - const getAuthToken = () => - this.generateJWT(this.id, { - permissions: ['git:write'], - ttl, - }); - - return createCommitBuilder({ - options: builderOptions, - getAuthToken, - transport, - }); - } - - async createCommitFromDiff(options: CreateCommitFromDiffOptions): Promise { - const version = this.options.apiVersion ?? API_VERSION; - const baseUrl = this.options.apiBaseUrl ?? 
GitStorage.getDefaultAPIBaseUrl(this.options.name); - const transport = new FetchDiffCommitTransport({ baseUrl, version }); - const ttl = resolveCommitTtlSeconds(options); - const requestOptions: CreateCommitFromDiffOptions = { - ...options, - ttl, - }; - const getAuthToken = () => - this.generateJWT(this.id, { - permissions: ['git:write'], - ttl, - }); - - return sendCommitFromDiff({ - options: requestOptions, - getAuthToken, - transport, - }); - } + private readonly api: ApiFetcher; + + constructor( + public readonly id: string, + public readonly defaultBranch: string, + private readonly options: GitStorageOptions, + private readonly generateJWT: ( + repoId: string, + options?: GetRemoteURLOptions + ) => Promise + ) { + this.api = getApiInstance( + this.options.apiBaseUrl ?? GitStorage.getDefaultAPIBaseUrl(options.name), + this.options.apiVersion ?? API_VERSION + ); + } + + async getRemoteURL(urlOptions?: GetRemoteURLOptions): Promise { + const url = new URL( + `https://${this.options.storageBaseUrl}/${this.id}.git` + ); + url.username = `t`; + url.password = await this.generateJWT(this.id, urlOptions); + return url.toString(); + } + + async getEphemeralRemoteURL( + urlOptions?: GetRemoteURLOptions + ): Promise { + const url = new URL( + `https://${this.options.storageBaseUrl}/${this.id}+ephemeral.git` + ); + url.username = `t`; + url.password = await this.generateJWT(this.id, urlOptions); + return url.toString(); + } + + async getFileStream(options: GetFileOptions): Promise { + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:read'], + ttl, + }); + + const params: Record = { + path: options.path, + }; + + if (options.ref) { + params.ref = options.ref; + } + if (typeof options.ephemeral === 'boolean') { + params.ephemeral = String(options.ephemeral); + } + if (typeof options.ephemeralBase === 'boolean') { + params.ephemeral_base = String(options.ephemeralBase); + } + 
+ // Return the raw fetch Response for streaming + return this.api.get({ path: 'repos/file', params }, jwt); + } + + async listFiles(options?: ListFilesOptions): Promise { + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:read'], + ttl, + }); + + const params: Record = {}; + if (options?.ref) { + params.ref = options.ref; + } + if (typeof options?.ephemeral === 'boolean') { + params.ephemeral = String(options.ephemeral); + } + const response = await this.api.get( + { + path: 'repos/files', + params: Object.keys(params).length ? params : undefined, + }, + jwt + ); + + const raw = listFilesResponseSchema.parse(await response.json()); + return { paths: raw.paths, ref: raw.ref }; + } + + async listBranches( + options?: ListBranchesOptions + ): Promise { + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:read'], + ttl, + }); + + const cursor = options?.cursor; + const limit = options?.limit; + + let params: Record | undefined; + + if (typeof cursor === 'string' || typeof limit === 'number') { + params = {}; + if (typeof cursor === 'string') { + params.cursor = cursor; + } + if (typeof limit === 'number') { + params.limit = limit.toString(); + } + } + + const response = await this.api.get( + { path: 'repos/branches', params }, + jwt + ); + + const raw = listBranchesResponseSchema.parse(await response.json()); + return transformListBranchesResult({ + ...raw, + next_cursor: raw.next_cursor ?? 
undefined, + }); + } + + async listCommits(options?: ListCommitsOptions): Promise { + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:read'], + ttl, + }); + + let params: Record | undefined; + + if (options?.branch || options?.cursor || options?.limit) { + params = {}; + if (options?.branch) { + params.branch = options.branch; + } + if (options?.cursor) { + params.cursor = options.cursor; + } + if (typeof options?.limit == 'number') { + params.limit = options.limit.toString(); + } + } + + const response = await this.api.get({ path: 'repos/commits', params }, jwt); + + const raw = listCommitsResponseSchema.parse(await response.json()); + return transformListCommitsResult({ + ...raw, + next_cursor: raw.next_cursor ?? undefined, + }); + } + + async getNote(options: GetNoteOptions): Promise { + const sha = options?.sha?.trim(); + if (!sha) { + throw new Error('getNote sha is required'); + } + + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:read'], + ttl, + }); + + const response = await this.api.get( + { path: 'repos/notes', params: { sha } }, + jwt + ); + const raw = noteReadResponseSchema.parse(await response.json()); + return transformNoteReadResult(raw); + } + + async createNote(options: CreateNoteOptions): Promise { + const sha = options?.sha?.trim(); + if (!sha) { + throw new Error('createNote sha is required'); + } + + const note = options?.note?.trim(); + if (!note) { + throw new Error('createNote note is required'); + } + + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:write'], + ttl, + }); + + const body = buildNoteWriteBody(sha, note, 'add', { + expectedRefSha: options.expectedRefSha, + author: options.author, + }); + + const response = await this.api.post({ path: 
'repos/notes', body }, jwt, { + allowedStatus: [...NOTE_WRITE_ALLOWED_STATUS], + }); + + const result = await parseNoteWriteResponse(response, 'POST'); + if (!result.result.success) { + throw new RefUpdateError( + result.result.message ?? + `createNote failed with status ${result.result.status}`, + { + status: result.result.status, + message: result.result.message, + refUpdate: toPartialRefUpdate( + result.targetRef, + result.baseCommit, + result.newRefSha + ), + } + ); + } + return result; + } + + async appendNote(options: AppendNoteOptions): Promise { + const sha = options?.sha?.trim(); + if (!sha) { + throw new Error('appendNote sha is required'); + } + + const note = options?.note?.trim(); + if (!note) { + throw new Error('appendNote note is required'); + } + + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:write'], + ttl, + }); + + const body = buildNoteWriteBody(sha, note, 'append', { + expectedRefSha: options.expectedRefSha, + author: options.author, + }); + + const response = await this.api.post({ path: 'repos/notes', body }, jwt, { + allowedStatus: [...NOTE_WRITE_ALLOWED_STATUS], + }); + + const result = await parseNoteWriteResponse(response, 'POST'); + if (!result.result.success) { + throw new RefUpdateError( + result.result.message ?? 
+ `appendNote failed with status ${result.result.status}`, + { + status: result.result.status, + message: result.result.message, + refUpdate: toPartialRefUpdate( + result.targetRef, + result.baseCommit, + result.newRefSha + ), + } + ); + } + return result; + } + + async deleteNote(options: DeleteNoteOptions): Promise { + const sha = options?.sha?.trim(); + if (!sha) { + throw new Error('deleteNote sha is required'); + } + + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:write'], + ttl, + }); + + const body: Record = { + sha, + }; + + const expectedRefSha = options.expectedRefSha?.trim(); + if (expectedRefSha) { + body.expected_ref_sha = expectedRefSha; + } + + if (options.author) { + const authorName = options.author.name?.trim(); + const authorEmail = options.author.email?.trim(); + if (!authorName || !authorEmail) { + throw new Error( + 'deleteNote author name and email are required when provided' + ); + } + body.author = { + name: authorName, + email: authorEmail, + }; + } + + const response = await this.api.delete({ path: 'repos/notes', body }, jwt, { + allowedStatus: [...NOTE_WRITE_ALLOWED_STATUS], + }); + + const result = await parseNoteWriteResponse(response, 'DELETE'); + if (!result.result.success) { + throw new RefUpdateError( + result.result.message ?? 
+ `deleteNote failed with status ${result.result.status}`, + { + status: result.result.status, + message: result.result.message, + refUpdate: toPartialRefUpdate( + result.targetRef, + result.baseCommit, + result.newRefSha + ), + } + ); + } + return result; + } + + async getBranchDiff( + options: GetBranchDiffOptions + ): Promise { + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:read'], + ttl, + }); + + const params: Record = { + branch: options.branch, + }; + + if (options.base) { + params.base = options.base; + } + if (typeof options.ephemeral === 'boolean') { + params.ephemeral = String(options.ephemeral); + } + if (typeof options.ephemeralBase === 'boolean') { + params.ephemeral_base = String(options.ephemeralBase); + } + if (options.paths && options.paths.length > 0) { + params.path = options.paths; + } + + const response = await this.api.get( + { path: 'repos/branches/diff', params }, + jwt + ); + + const raw = branchDiffResponseSchema.parse(await response.json()); + return transformBranchDiffResult(raw); + } + + async getCommitDiff( + options: GetCommitDiffOptions + ): Promise { + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:read'], + ttl, + }); + + const params: Record = { + sha: options.sha, + }; + + if (options.baseSha) { + params.baseSha = options.baseSha; + } + if (options.paths && options.paths.length > 0) { + params.path = options.paths; + } + + const response = await this.api.get({ path: 'repos/diff', params }, jwt); + + const raw = commitDiffResponseSchema.parse(await response.json()); + return transformCommitDiffResult(raw); + } + + async grep(options: GrepOptions): Promise { + const pattern = options?.query?.pattern?.trim(); + if (!pattern) { + throw new Error('grep query.pattern is required'); + } + + const ttl = resolveInvocationTtlSeconds(options, 
DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:read'], + ttl, + }); + + const body: Record = { + query: { + pattern, + ...(typeof options.query.caseSensitive === 'boolean' + ? { case_sensitive: options.query.caseSensitive } + : {}), + }, + }; + + if (options.ref) { + body.rev = options.ref; + } + if (Array.isArray(options.paths) && options.paths.length > 0) { + body.paths = options.paths; + } + if (options.fileFilters) { + body.file_filters = { + ...(options.fileFilters.includeGlobs + ? { include_globs: options.fileFilters.includeGlobs } + : {}), + ...(options.fileFilters.excludeGlobs + ? { exclude_globs: options.fileFilters.excludeGlobs } + : {}), + ...(options.fileFilters.extensionFilters + ? { extension_filters: options.fileFilters.extensionFilters } + : {}), + }; + } + if (options.context) { + body.context = { + ...(typeof options.context.before === 'number' + ? { before: options.context.before } + : {}), + ...(typeof options.context.after === 'number' + ? { after: options.context.after } + : {}), + }; + } + if (options.limits) { + body.limits = { + ...(typeof options.limits.maxLines === 'number' + ? { max_lines: options.limits.maxLines } + : {}), + ...(typeof options.limits.maxMatchesPerFile === 'number' + ? { max_matches_per_file: options.limits.maxMatchesPerFile } + : {}), + }; + } + if (options.pagination) { + body.pagination = { + ...(typeof options.pagination.cursor === 'string' && + options.pagination.cursor.trim() !== '' + ? { cursor: options.pagination.cursor } + : {}), + ...(typeof options.pagination.limit === 'number' + ? 
{ limit: options.pagination.limit } + : {}), + }; + } + + const response = await this.api.post({ path: 'repos/grep', body }, jwt); + const raw = grepResponseSchema.parse(await response.json()); + + return { + query: { + pattern: raw.query.pattern, + caseSensitive: raw.query.case_sensitive, + }, + repo: { + ref: raw.repo.ref, + commit: raw.repo.commit, + }, + matches: raw.matches.map(transformGrepFileMatch), + nextCursor: raw.next_cursor ?? undefined, + hasMore: raw.has_more, + }; + } + + async pullUpstream(options: PullUpstreamOptions = {}): Promise { + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:write'], + ttl, + }); + + const body: Record = {}; + + if (options.ref) { + body.ref = options.ref; + } + + const response = await this.api.post( + { path: 'repos/pull-upstream', body }, + jwt + ); + + if (response.status !== 202) { + throw new Error( + `Pull Upstream failed: ${response.status} ${await response.text()}` + ); + } + + return; + } + + async createBranch( + options: CreateBranchOptions + ): Promise { + const baseBranch = options?.baseBranch?.trim(); + if (!baseBranch) { + throw new Error('createBranch baseBranch is required'); + } + const targetBranch = options?.targetBranch?.trim(); + if (!targetBranch) { + throw new Error('createBranch targetBranch is required'); + } + + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:write'], + ttl, + }); + + const body: Record = { + base_branch: baseBranch, + target_branch: targetBranch, + }; + + if (options.baseIsEphemeral === true) { + body.base_is_ephemeral = true; + } + if (options.targetIsEphemeral === true) { + body.target_is_ephemeral = true; + } + + const response = await this.api.post( + { path: 'repos/branches/create', body }, + jwt + ); + const raw = createBranchResponseSchema.parse(await response.json()); + return 
transformCreateBranchResult(raw); + } + + async restoreCommit( + options: RestoreCommitOptions + ): Promise { + const targetBranch = options?.targetBranch?.trim(); + if (!targetBranch) { + throw new Error('restoreCommit targetBranch is required'); + } + if (targetBranch.startsWith('refs/')) { + throw new Error( + 'restoreCommit targetBranch must not include refs/ prefix' + ); + } + + const targetCommitSha = options?.targetCommitSha?.trim(); + if (!targetCommitSha) { + throw new Error('restoreCommit targetCommitSha is required'); + } + const commitMessage = options?.commitMessage?.trim(); + + const authorName = options.author?.name?.trim(); + const authorEmail = options.author?.email?.trim(); + if (!authorName || !authorEmail) { + throw new Error('restoreCommit author name and email are required'); + } + + const ttl = resolveCommitTtlSeconds(options); + const jwt = await this.generateJWT(this.id, { + permissions: ['git:write'], + ttl, + }); + + const metadata: Record = { + target_branch: targetBranch, + target_commit_sha: targetCommitSha, + author: { + name: authorName, + email: authorEmail, + }, + }; + + if (commitMessage) { + metadata.commit_message = commitMessage; + } + + const expectedHeadSha = options.expectedHeadSha?.trim(); + if (expectedHeadSha) { + metadata.expected_head_sha = expectedHeadSha; + } + + if (options.committer) { + const committerName = options.committer.name?.trim(); + const committerEmail = options.committer.email?.trim(); + if (!committerName || !committerEmail) { + throw new Error( + 'restoreCommit committer name and email are required when provided' + ); + } + metadata.committer = { + name: committerName, + email: committerEmail, + }; + } + + const response = await this.api.post( + { path: 'repos/restore-commit', body: { metadata } }, + jwt, + { + allowedStatus: [...RESTORE_COMMIT_ALLOWED_STATUS], + } + ); + + const payload = await response.json(); + const parsed = parseRestoreCommitPayload(payload); + if (parsed && 'ack' in parsed) { + 
return buildRestoreCommitResult(parsed.ack); + } + + const failure = parsed && 'failure' in parsed ? parsed.failure : undefined; + const status = + failure?.status ?? httpStatusToRestoreStatus(response.status); + const message = + failure?.message ?? + `Restore commit failed with HTTP ${response.status}` + + (response.statusText ? ` ${response.statusText}` : ''); + + throw new RefUpdateError(message, { + status, + refUpdate: failure?.refUpdate, + }); + } + + createCommit(options: CreateCommitOptions): CommitBuilder { + const version = this.options.apiVersion ?? API_VERSION; + const baseUrl = + this.options.apiBaseUrl ?? + GitStorage.getDefaultAPIBaseUrl(this.options.name); + const transport = new FetchCommitTransport({ baseUrl, version }); + const ttl = resolveCommitTtlSeconds(options); + const builderOptions: CreateCommitOptions = { + ...options, + ttl, + }; + const getAuthToken = () => + this.generateJWT(this.id, { + permissions: ['git:write'], + ttl, + }); + + return createCommitBuilder({ + options: builderOptions, + getAuthToken, + transport, + }); + } + + async createCommitFromDiff( + options: CreateCommitFromDiffOptions + ): Promise { + const version = this.options.apiVersion ?? API_VERSION; + const baseUrl = + this.options.apiBaseUrl ?? 
+ GitStorage.getDefaultAPIBaseUrl(this.options.name); + const transport = new FetchDiffCommitTransport({ baseUrl, version }); + const ttl = resolveCommitTtlSeconds(options); + const requestOptions: CreateCommitFromDiffOptions = { + ...options, + ttl, + }; + const getAuthToken = () => + this.generateJWT(this.id, { + permissions: ['git:write'], + ttl, + }); + + return sendCommitFromDiff({ + options: requestOptions, + getAuthToken, + transport, + }); + } } export class GitStorage { - private options: GitStorageOptions; - private api: ApiFetcher; - - constructor(options: GitStorageOptions) { - if ( - !options || - options.name === undefined || - options.key === undefined || - options.name === null || - options.key === null - ) { - throw new Error( - 'GitStorage requires a name and key. Please check your configuration and try again.', - ); - } - - if (typeof options.name !== 'string' || options.name.trim() === '') { - throw new Error('GitStorage name must be a non-empty string.'); - } - - if (typeof options.key !== 'string' || options.key.trim() === '') { - throw new Error('GitStorage key must be a non-empty string.'); - } - - const resolvedApiBaseUrl = options.apiBaseUrl ?? GitStorage.getDefaultAPIBaseUrl(options.name); - const resolvedApiVersion = options.apiVersion ?? API_VERSION; - const resolvedStorageBaseUrl = - options.storageBaseUrl ?? 
GitStorage.getDefaultStorageBaseUrl(options.name); - const resolvedDefaultTtl = options.defaultTTL; - - this.api = getApiInstance(resolvedApiBaseUrl, resolvedApiVersion); - - this.options = { - key: options.key, - name: options.name, - apiBaseUrl: resolvedApiBaseUrl, - apiVersion: resolvedApiVersion, - storageBaseUrl: resolvedStorageBaseUrl, - defaultTTL: resolvedDefaultTtl, - }; - } - - static getDefaultAPIBaseUrl(name: string): string { - return API_BASE_URL.replace('{{org}}', name); - } - - static getDefaultStorageBaseUrl(name: string): string { - return STORAGE_BASE_URL.replace('{{org}}', name); - } - - /** - * Create a new repository - * @returns The created repository - */ - async createRepo(options?: CreateRepoOptions): Promise { - const repoId = options?.id || crypto.randomUUID(); - const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); - const jwt = await this.generateJWT(repoId, { - permissions: ['repo:write'], - ttl, - }); - - const baseRepo = options?.baseRepo; - const isFork = baseRepo ? 'id' in baseRepo : false; - let baseRepoOptions: Record | null = null; - let resolvedDefaultBranch: string | undefined; - - if (baseRepo) { - if ('id' in baseRepo) { - const baseRepoToken = await this.generateJWT(baseRepo.id, { - permissions: ['git:read'], - ttl, - }); - baseRepoOptions = { - provider: 'code', - owner: this.options.name, - name: baseRepo.id, - operation: 'fork', - auth: { token: baseRepoToken }, - ...(baseRepo.ref ? { ref: baseRepo.ref } : {}), - ...(baseRepo.sha ? 
{ sha: baseRepo.sha } : {}), - }; - } else { - baseRepoOptions = { - provider: 'github', - ...snakecaseKeys(baseRepo as unknown as Record), - }; - resolvedDefaultBranch = baseRepo.defaultBranch; - } - } - - // Match backend priority: baseRepo.defaultBranch > options.defaultBranch > 'main' - if (!resolvedDefaultBranch) { - if (options?.defaultBranch) { - resolvedDefaultBranch = options.defaultBranch; - } else if (!isFork) { - resolvedDefaultBranch = 'main'; - } - } - - const createRepoPath = - baseRepoOptions || resolvedDefaultBranch - ? { - path: 'repos', - body: { - ...(baseRepoOptions && { base_repo: baseRepoOptions }), - ...(resolvedDefaultBranch && { default_branch: resolvedDefaultBranch }), - }, - } - : 'repos'; - - // Allow 409 so we can map it to a clearer error message - const resp = await this.api.post(createRepoPath, jwt, { allowedStatus: [409] }); - if (resp.status === 409) { - throw new Error('Repository already exists'); - } - - return new RepoImpl( - repoId, - resolvedDefaultBranch ?? 'main', - this.options, - this.generateJWT.bind(this), - ); - } - - /** - * List repositories for the authenticated organization - * @returns Paginated repositories list - */ - async listRepos(options?: ListReposOptions): Promise { - const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); - const jwt = await this.generateJWT('org', { - permissions: ['org:read'], - ttl, - }); - - let params: Record | undefined; - if (options?.cursor || typeof options?.limit === 'number') { - params = {}; - if (options.cursor) { - params.cursor = options.cursor; - } - if (typeof options.limit === 'number') { - params.limit = options.limit.toString(); - } - } - - const response = await this.api.get({ path: 'repos', params }, jwt); - const raw = listReposResponseSchema.parse(await response.json()); - return transformListReposResult({ - ...raw, - next_cursor: raw.next_cursor ?? 
undefined, - }); - } - - /** - * Find a repository by ID - * @param options The search options - * @returns The found repository - */ - async findOne(options: FindOneOptions): Promise { - const jwt = await this.generateJWT(options.id, { - permissions: ['git:read'], - ttl: DEFAULT_TOKEN_TTL_SECONDS, - }); - - // Allow 404 to indicate "not found" without throwing - const resp = await this.api.get('repo', jwt, { allowedStatus: [404] }); - if (resp.status === 404) { - return null; - } - const body = (await resp.json()) as { default_branch?: string }; - const defaultBranch = body.default_branch ?? 'main'; - return new RepoImpl(options.id, defaultBranch, this.options, this.generateJWT.bind(this)); - } - - /** - * Delete a repository by ID - * @param options The delete options containing the repo ID - * @returns The deletion result - */ - async deleteRepo(options: DeleteRepoOptions): Promise { - const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); - const jwt = await this.generateJWT(options.id, { - permissions: ['repo:write'], - ttl, - }); - - // Allow 404 and 409 for clearer error handling - const resp = await this.api.delete('repos/delete', jwt, { allowedStatus: [404, 409] }); - if (resp.status === 404) { - throw new Error('Repository not found'); - } - if (resp.status === 409) { - throw new Error('Repository already deleted'); - } - - const body = (await resp.json()) as { repo_id: string; message: string }; - return { - repoId: body.repo_id, - message: body.message, - }; - } - - /** - * Get the current configuration - * @returns The client configuration - */ - getConfig(): GitStorageOptions { - return { ...this.options }; - } - - /** - * Generate a JWT token for git storage URL authentication - * @private - */ - private async generateJWT(repoId: string, options?: GetRemoteURLOptions): Promise { - // Default permissions and TTL - const permissions = options?.permissions || ['git:write', 'git:read']; - const ttl = 
resolveInvocationTtlSeconds(options, this.options.defaultTTL ?? 365 * 24 * 60 * 60); - - // Create the JWT payload - const now = Math.floor(Date.now() / 1000); - const payload = { - iss: this.options.name, - sub: '@pierre/storage', - repo: repoId, - scopes: permissions, - iat: now, - exp: now + ttl, - }; - - // Sign the JWT with the key as the secret - // Using HS256 for symmetric signing with the key - const key = await importPKCS8(this.options.key, 'ES256'); - // Sign the JWT with the key as the secret - const jwt = await new SignJWT(payload) - .setProtectedHeader({ alg: 'ES256', typ: 'JWT' }) - .sign(key); - - return jwt; - } + private options: GitStorageOptions; + private api: ApiFetcher; + + constructor(options: GitStorageOptions) { + if ( + !options || + options.name === undefined || + options.key === undefined || + options.name === null || + options.key === null + ) { + throw new Error( + 'GitStorage requires a name and key. Please check your configuration and try again.' + ); + } + + if (typeof options.name !== 'string' || options.name.trim() === '') { + throw new Error('GitStorage name must be a non-empty string.'); + } + + if (typeof options.key !== 'string' || options.key.trim() === '') { + throw new Error('GitStorage key must be a non-empty string.'); + } + + const resolvedApiBaseUrl = + options.apiBaseUrl ?? GitStorage.getDefaultAPIBaseUrl(options.name); + const resolvedApiVersion = options.apiVersion ?? API_VERSION; + const resolvedStorageBaseUrl = + options.storageBaseUrl ?? 
+ GitStorage.getDefaultStorageBaseUrl(options.name); + const resolvedDefaultTtl = options.defaultTTL; + + this.api = getApiInstance(resolvedApiBaseUrl, resolvedApiVersion); + + this.options = { + key: options.key, + name: options.name, + apiBaseUrl: resolvedApiBaseUrl, + apiVersion: resolvedApiVersion, + storageBaseUrl: resolvedStorageBaseUrl, + defaultTTL: resolvedDefaultTtl, + }; + } + + static getDefaultAPIBaseUrl(name: string): string { + return API_BASE_URL.replace('{{org}}', name); + } + + static getDefaultStorageBaseUrl(name: string): string { + return STORAGE_BASE_URL.replace('{{org}}', name); + } + + /** + * Create a new repository + * @returns The created repository + */ + async createRepo(options?: CreateRepoOptions): Promise { + const repoId = options?.id || crypto.randomUUID(); + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(repoId, { + permissions: ['repo:write'], + ttl, + }); + + const baseRepo = options?.baseRepo; + const isFork = baseRepo ? 'id' in baseRepo : false; + let baseRepoOptions: Record | null = null; + let resolvedDefaultBranch: string | undefined; + + if (baseRepo) { + if ('id' in baseRepo) { + const baseRepoToken = await this.generateJWT(baseRepo.id, { + permissions: ['git:read'], + ttl, + }); + baseRepoOptions = { + provider: 'code', + owner: this.options.name, + name: baseRepo.id, + operation: 'fork', + auth: { token: baseRepoToken }, + ...(baseRepo.ref ? { ref: baseRepo.ref } : {}), + ...(baseRepo.sha ? 
{ sha: baseRepo.sha } : {}), + }; + } else { + baseRepoOptions = { + provider: 'github', + ...snakecaseKeys(baseRepo as unknown as Record), + }; + resolvedDefaultBranch = baseRepo.defaultBranch; + } + } + + // Match backend priority: baseRepo.defaultBranch > options.defaultBranch > 'main' + if (!resolvedDefaultBranch) { + if (options?.defaultBranch) { + resolvedDefaultBranch = options.defaultBranch; + } else if (!isFork) { + resolvedDefaultBranch = 'main'; + } + } + + const createRepoPath = + baseRepoOptions || resolvedDefaultBranch + ? { + path: 'repos', + body: { + ...(baseRepoOptions && { base_repo: baseRepoOptions }), + ...(resolvedDefaultBranch && { + default_branch: resolvedDefaultBranch, + }), + }, + } + : 'repos'; + + // Allow 409 so we can map it to a clearer error message + const resp = await this.api.post(createRepoPath, jwt, { + allowedStatus: [409], + }); + if (resp.status === 409) { + throw new Error('Repository already exists'); + } + + return new RepoImpl( + repoId, + resolvedDefaultBranch ?? 'main', + this.options, + this.generateJWT.bind(this) + ); + } + + /** + * List repositories for the authenticated organization + * @returns Paginated repositories list + */ + async listRepos(options?: ListReposOptions): Promise { + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT('org', { + permissions: ['org:read'], + ttl, + }); + + let params: Record | undefined; + if (options?.cursor || typeof options?.limit === 'number') { + params = {}; + if (options.cursor) { + params.cursor = options.cursor; + } + if (typeof options.limit === 'number') { + params.limit = options.limit.toString(); + } + } + + const response = await this.api.get({ path: 'repos', params }, jwt); + const raw = listReposResponseSchema.parse(await response.json()); + return transformListReposResult({ + ...raw, + next_cursor: raw.next_cursor ?? 
undefined, + }); + } + + /** + * Find a repository by ID + * @param options The search options + * @returns The found repository + */ + async findOne(options: FindOneOptions): Promise { + const jwt = await this.generateJWT(options.id, { + permissions: ['git:read'], + ttl: DEFAULT_TOKEN_TTL_SECONDS, + }); + + // Allow 404 to indicate "not found" without throwing + const resp = await this.api.get('repo', jwt, { allowedStatus: [404] }); + if (resp.status === 404) { + return null; + } + const body = (await resp.json()) as { default_branch?: string }; + const defaultBranch = body.default_branch ?? 'main'; + return new RepoImpl( + options.id, + defaultBranch, + this.options, + this.generateJWT.bind(this) + ); + } + + /** + * Delete a repository by ID + * @param options The delete options containing the repo ID + * @returns The deletion result + */ + async deleteRepo(options: DeleteRepoOptions): Promise { + const ttl = resolveInvocationTtlSeconds(options, DEFAULT_TOKEN_TTL_SECONDS); + const jwt = await this.generateJWT(options.id, { + permissions: ['repo:write'], + ttl, + }); + + // Allow 404 and 409 for clearer error handling + const resp = await this.api.delete('repos/delete', jwt, { + allowedStatus: [404, 409], + }); + if (resp.status === 404) { + throw new Error('Repository not found'); + } + if (resp.status === 409) { + throw new Error('Repository already deleted'); + } + + const body = (await resp.json()) as { repo_id: string; message: string }; + return { + repoId: body.repo_id, + message: body.message, + }; + } + + /** + * Get the current configuration + * @returns The client configuration + */ + getConfig(): GitStorageOptions { + return { ...this.options }; + } + + /** + * Generate a JWT token for git storage URL authentication + * @private + */ + private async generateJWT( + repoId: string, + options?: GetRemoteURLOptions + ): Promise { + // Default permissions and TTL + const permissions = options?.permissions || ['git:write', 'git:read']; + const ttl = 
resolveInvocationTtlSeconds( + options, + this.options.defaultTTL ?? 365 * 24 * 60 * 60 + ); + + // Create the JWT payload + const now = Math.floor(Date.now() / 1000); + const payload = { + iss: this.options.name, + sub: '@pierre/storage', + repo: repoId, + scopes: permissions, + iat: now, + exp: now + ttl, + }; + + // Sign the JWT with the key as the secret + // Using HS256 for symmetric signing with the key + const key = await importPKCS8(this.options.key, 'ES256'); + // Sign the JWT with the key as the secret + const jwt = await new SignJWT(payload) + .setProtectedHeader({ alg: 'ES256', typ: 'JWT' }) + .sign(key); + + return jwt; + } } // Export a default client factory export function createClient(options: GitStorageOptions): GitStorage { - return new GitStorage(options); + return new GitStorage(options); } // Export CodeStorage as an alias for GitStorage diff --git a/packages/git-storage-sdk-node/src/schemas.ts b/packages/git-storage-sdk-node/src/schemas.ts index 5a7989f02..1eb14deaf 100644 --- a/packages/git-storage-sdk-node/src/schemas.ts +++ b/packages/git-storage-sdk-node/src/schemas.ts @@ -1,205 +1,209 @@ import { z } from 'zod'; export const listFilesResponseSchema = z.object({ - paths: z.array(z.string()), - ref: z.string(), + paths: z.array(z.string()), + ref: z.string(), }); export const branchInfoSchema = z.object({ - cursor: z.string(), - name: z.string(), - head_sha: z.string(), - created_at: z.string(), + cursor: z.string(), + name: z.string(), + head_sha: z.string(), + created_at: z.string(), }); export const listBranchesResponseSchema = z.object({ - branches: z.array(branchInfoSchema), - next_cursor: z.string().nullable().optional(), - has_more: z.boolean(), + branches: z.array(branchInfoSchema), + next_cursor: z.string().nullable().optional(), + has_more: z.boolean(), }); export const commitInfoRawSchema = z.object({ - sha: z.string(), - message: z.string(), - author_name: z.string(), - author_email: z.string(), - committer_name: z.string(), - 
committer_email: z.string(), - date: z.string(), + sha: z.string(), + message: z.string(), + author_name: z.string(), + author_email: z.string(), + committer_name: z.string(), + committer_email: z.string(), + date: z.string(), }); export const listCommitsResponseSchema = z.object({ - commits: z.array(commitInfoRawSchema), - next_cursor: z.string().nullable().optional(), - has_more: z.boolean(), + commits: z.array(commitInfoRawSchema), + next_cursor: z.string().nullable().optional(), + has_more: z.boolean(), }); export const repoBaseInfoSchema = z.object({ - provider: z.string(), - owner: z.string(), - name: z.string(), + provider: z.string(), + owner: z.string(), + name: z.string(), }); export const repoInfoSchema = z.object({ - repo_id: z.string(), - url: z.string(), - default_branch: z.string(), - created_at: z.string(), - base_repo: repoBaseInfoSchema.optional().nullable(), + repo_id: z.string(), + url: z.string(), + default_branch: z.string(), + created_at: z.string(), + base_repo: repoBaseInfoSchema.optional().nullable(), }); export const listReposResponseSchema = z.object({ - repos: z.array(repoInfoSchema), - next_cursor: z.string().nullable().optional(), - has_more: z.boolean(), + repos: z.array(repoInfoSchema), + next_cursor: z.string().nullable().optional(), + has_more: z.boolean(), }); export const noteReadResponseSchema = z.object({ - sha: z.string(), - note: z.string(), - ref_sha: z.string(), + sha: z.string(), + note: z.string(), + ref_sha: z.string(), }); export const noteResultSchema = z.object({ - success: z.boolean(), - status: z.string(), - message: z.string().optional(), + success: z.boolean(), + status: z.string(), + message: z.string().optional(), }); export const noteWriteResponseSchema = z.object({ - sha: z.string(), - target_ref: z.string(), - base_commit: z.string().optional(), - new_ref_sha: z.string(), - result: noteResultSchema, + sha: z.string(), + target_ref: z.string(), + base_commit: z.string().optional(), + new_ref_sha: z.string(), 
+ result: noteResultSchema, }); export const diffStatsSchema = z.object({ - files: z.number(), - additions: z.number(), - deletions: z.number(), - changes: z.number(), + files: z.number(), + additions: z.number(), + deletions: z.number(), + changes: z.number(), }); export const diffFileRawSchema = z.object({ - path: z.string(), - state: z.string(), - old_path: z.string().nullable().optional(), - raw: z.string(), - bytes: z.number(), - is_eof: z.boolean(), + path: z.string(), + state: z.string(), + old_path: z.string().nullable().optional(), + raw: z.string(), + bytes: z.number(), + is_eof: z.boolean(), }); export const filteredFileRawSchema = z.object({ - path: z.string(), - state: z.string(), - old_path: z.string().nullable().optional(), - bytes: z.number(), - is_eof: z.boolean(), + path: z.string(), + state: z.string(), + old_path: z.string().nullable().optional(), + bytes: z.number(), + is_eof: z.boolean(), }); export const branchDiffResponseSchema = z.object({ - branch: z.string(), - base: z.string(), - stats: diffStatsSchema, - files: z.array(diffFileRawSchema), - filtered_files: z.array(filteredFileRawSchema), + branch: z.string(), + base: z.string(), + stats: diffStatsSchema, + files: z.array(diffFileRawSchema), + filtered_files: z.array(filteredFileRawSchema), }); export const commitDiffResponseSchema = z.object({ - sha: z.string(), - stats: diffStatsSchema, - files: z.array(diffFileRawSchema), - filtered_files: z.array(filteredFileRawSchema), + sha: z.string(), + stats: diffStatsSchema, + files: z.array(diffFileRawSchema), + filtered_files: z.array(filteredFileRawSchema), }); export const createBranchResponseSchema = z.object({ - message: z.string(), - target_branch: z.string(), - target_is_ephemeral: z.boolean(), - commit_sha: z.string().nullable().optional(), + message: z.string(), + target_branch: z.string(), + target_is_ephemeral: z.boolean(), + commit_sha: z.string().nullable().optional(), }); export const refUpdateResultSchema = z.object({ - branch: 
z.string(), - old_sha: z.string(), - new_sha: z.string(), - success: z.boolean(), - status: z.string(), - message: z.string().optional(), + branch: z.string(), + old_sha: z.string(), + new_sha: z.string(), + success: z.boolean(), + status: z.string(), + message: z.string().optional(), }); export const commitPackCommitSchema = z.object({ - commit_sha: z.string(), - tree_sha: z.string(), - target_branch: z.string(), - pack_bytes: z.number(), - blob_count: z.number(), + commit_sha: z.string(), + tree_sha: z.string(), + target_branch: z.string(), + pack_bytes: z.number(), + blob_count: z.number(), }); -export const restoreCommitCommitSchema = commitPackCommitSchema.omit({ blob_count: true }); +export const restoreCommitCommitSchema = commitPackCommitSchema.omit({ + blob_count: true, +}); export const refUpdateResultWithOptionalsSchema = z.object({ - branch: z.string().optional(), - old_sha: z.string().optional(), - new_sha: z.string().optional(), - success: z.boolean().optional(), - status: z.string(), - message: z.string().optional(), + branch: z.string().optional(), + old_sha: z.string().optional(), + new_sha: z.string().optional(), + success: z.boolean().optional(), + status: z.string(), + message: z.string().optional(), }); export const commitPackAckSchema = z.object({ - commit: commitPackCommitSchema, - result: refUpdateResultSchema, + commit: commitPackCommitSchema, + result: refUpdateResultSchema, }); export const restoreCommitAckSchema = z.object({ - commit: restoreCommitCommitSchema, - result: refUpdateResultSchema.extend({ success: z.literal(true) }), + commit: restoreCommitCommitSchema, + result: refUpdateResultSchema.extend({ success: z.literal(true) }), }); export const commitPackResponseSchema = z.object({ - commit: commitPackCommitSchema.partial().optional().nullable(), - result: refUpdateResultWithOptionalsSchema, + commit: commitPackCommitSchema.partial().optional().nullable(), + result: refUpdateResultWithOptionalsSchema, }); export const 
restoreCommitResponseSchema = z.object({ - commit: restoreCommitCommitSchema.partial().optional().nullable(), - result: refUpdateResultWithOptionalsSchema, + commit: restoreCommitCommitSchema.partial().optional().nullable(), + result: refUpdateResultWithOptionalsSchema, }); export const grepLineSchema = z.object({ - line_number: z.number(), - text: z.string(), - type: z.string(), + line_number: z.number(), + text: z.string(), + type: z.string(), }); export const grepFileMatchSchema = z.object({ - path: z.string(), - lines: z.array(grepLineSchema), + path: z.string(), + lines: z.array(grepLineSchema), }); export const grepResponseSchema = z.object({ - query: z.object({ - pattern: z.string(), - case_sensitive: z.boolean(), - }), - repo: z.object({ - ref: z.string(), - commit: z.string(), - }), - matches: z.array(grepFileMatchSchema), - next_cursor: z.string().nullable().optional(), - has_more: z.boolean(), + query: z.object({ + pattern: z.string(), + case_sensitive: z.boolean(), + }), + repo: z.object({ + ref: z.string(), + commit: z.string(), + }), + matches: z.array(grepFileMatchSchema), + next_cursor: z.string().nullable().optional(), + has_more: z.boolean(), }); export const errorEnvelopeSchema = z.object({ - error: z.string(), + error: z.string(), }); export type ListFilesResponseRaw = z.infer; export type RawBranchInfo = z.infer; -export type ListBranchesResponseRaw = z.infer; +export type ListBranchesResponseRaw = z.infer< + typeof listBranchesResponseSchema +>; export type RawCommitInfo = z.infer; export type ListCommitsResponseRaw = z.infer; export type RawRepoBaseInfo = z.infer; @@ -211,7 +215,9 @@ export type RawFileDiff = z.infer; export type RawFilteredFile = z.infer; export type GetBranchDiffResponseRaw = z.infer; export type GetCommitDiffResponseRaw = z.infer; -export type CreateBranchResponseRaw = z.infer; +export type CreateBranchResponseRaw = z.infer< + typeof createBranchResponseSchema +>; export type CommitPackAckRaw = z.infer; export type 
RestoreCommitAckRaw = z.infer; export type GrepResponseRaw = z.infer; diff --git a/packages/git-storage-sdk-node/src/stream-utils.ts b/packages/git-storage-sdk-node/src/stream-utils.ts index af4078432..eb3f7d9ab 100644 --- a/packages/git-storage-sdk-node/src/stream-utils.ts +++ b/packages/git-storage-sdk-node/src/stream-utils.ts @@ -2,254 +2,271 @@ import type { BlobLike, FileLike, ReadableStreamLike } from './types'; type NodeBuffer = Uint8Array & { toString(encoding?: string): string }; interface NodeBufferConstructor { - from(data: Uint8Array): NodeBuffer; - from(data: string, encoding?: string): NodeBuffer; - isBuffer(value: unknown): value is NodeBuffer; + from(data: Uint8Array): NodeBuffer; + from(data: string, encoding?: string): NodeBuffer; + isBuffer(value: unknown): value is NodeBuffer; } const BufferCtor: NodeBufferConstructor | undefined = ( - globalThis as { Buffer?: NodeBufferConstructor } + globalThis as { Buffer?: NodeBufferConstructor } ).Buffer; export const MAX_CHUNK_BYTES = 4 * 1024 * 1024; export type ChunkSegment = { - chunk: Uint8Array; - eof: boolean; + chunk: Uint8Array; + eof: boolean; }; -export async function* chunkify(source: AsyncIterable): AsyncIterable { - let pending: Uint8Array | null = null; - let produced = false; - - for await (const value of source) { - const bytes = value; - - if (pending && pending.byteLength === MAX_CHUNK_BYTES) { - yield { chunk: pending, eof: false }; - produced = true; - pending = null; - } - - const merged: Uint8Array = pending ? 
concatChunks(pending, bytes) : bytes; - pending = null; - - let cursor: Uint8Array = merged; - while (cursor.byteLength > MAX_CHUNK_BYTES) { - const chunk: Uint8Array = cursor.slice(0, MAX_CHUNK_BYTES); - cursor = cursor.slice(MAX_CHUNK_BYTES); - yield { chunk, eof: false }; - produced = true; - } - - pending = cursor; - } - - if (pending) { - yield { chunk: pending, eof: true }; - produced = true; - } - - if (!produced) { - yield { chunk: new Uint8Array(0), eof: true }; - } +export async function* chunkify( + source: AsyncIterable +): AsyncIterable { + let pending: Uint8Array | null = null; + let produced = false; + + for await (const value of source) { + const bytes = value; + + if (pending && pending.byteLength === MAX_CHUNK_BYTES) { + yield { chunk: pending, eof: false }; + produced = true; + pending = null; + } + + const merged: Uint8Array = pending ? concatChunks(pending, bytes) : bytes; + pending = null; + + let cursor: Uint8Array = merged; + while (cursor.byteLength > MAX_CHUNK_BYTES) { + const chunk: Uint8Array = cursor.slice(0, MAX_CHUNK_BYTES); + cursor = cursor.slice(MAX_CHUNK_BYTES); + yield { chunk, eof: false }; + produced = true; + } + + pending = cursor; + } + + if (pending) { + yield { chunk: pending, eof: true }; + produced = true; + } + + if (!produced) { + yield { chunk: new Uint8Array(0), eof: true }; + } } export async function* toAsyncIterable( - source: - | string - | Uint8Array - | ArrayBuffer - | BlobLike - | FileLike - | ReadableStreamLike - | AsyncIterable - | Iterable, + source: + | string + | Uint8Array + | ArrayBuffer + | BlobLike + | FileLike + | ReadableStreamLike + | AsyncIterable + | Iterable ): AsyncIterable { - if (typeof source === 'string') { - yield new TextEncoder().encode(source); - return; - } - if (source instanceof Uint8Array) { - yield source; - return; - } - if (source instanceof ArrayBuffer) { - yield new Uint8Array(source); - return; - } - if (ArrayBuffer.isView(source)) { - yield new Uint8Array(source.buffer, 
source.byteOffset, source.byteLength); - return; - } - if (isBlobLike(source)) { - const stream = source.stream(); - if (isAsyncIterable(stream)) { - for await (const chunk of stream as AsyncIterable) { - yield ensureUint8Array(chunk); - } - return; - } - if (isReadableStreamLike(stream)) { - yield* readReadableStream(stream); - return; - } - } - if (isReadableStreamLike(source)) { - yield* readReadableStream(source); - return; - } - if (isAsyncIterable(source)) { - for await (const chunk of source as AsyncIterable) { - yield ensureUint8Array(chunk); - } - return; - } - if (isIterable(source)) { - for (const chunk of source as Iterable) { - yield ensureUint8Array(chunk); - } - return; - } - throw new Error('Unsupported content source; expected binary data'); + if (typeof source === 'string') { + yield new TextEncoder().encode(source); + return; + } + if (source instanceof Uint8Array) { + yield source; + return; + } + if (source instanceof ArrayBuffer) { + yield new Uint8Array(source); + return; + } + if (ArrayBuffer.isView(source)) { + yield new Uint8Array(source.buffer, source.byteOffset, source.byteLength); + return; + } + if (isBlobLike(source)) { + const stream = source.stream(); + if (isAsyncIterable(stream)) { + for await (const chunk of stream as AsyncIterable) { + yield ensureUint8Array(chunk); + } + return; + } + if (isReadableStreamLike(stream)) { + yield* readReadableStream(stream); + return; + } + } + if (isReadableStreamLike(source)) { + yield* readReadableStream(source); + return; + } + if (isAsyncIterable(source)) { + for await (const chunk of source as AsyncIterable) { + yield ensureUint8Array(chunk); + } + return; + } + if (isIterable(source)) { + for (const chunk of source as Iterable) { + yield ensureUint8Array(chunk); + } + return; + } + throw new Error('Unsupported content source; expected binary data'); } export function base64Encode(bytes: Uint8Array): string { - if (BufferCtor) { - return BufferCtor.from(bytes).toString('base64'); - } - let 
binary = ''; - for (let i = 0; i < bytes.byteLength; i++) { - binary += String.fromCharCode(bytes[i]); - } - const btoaFn = (globalThis as { btoa?: (data: string) => string }).btoa; - if (typeof btoaFn === 'function') { - return btoaFn(binary); - } - throw new Error('Base64 encoding is not supported in this environment'); + if (BufferCtor) { + return BufferCtor.from(bytes).toString('base64'); + } + let binary = ''; + for (let i = 0; i < bytes.byteLength; i++) { + binary += String.fromCharCode(bytes[i]); + } + const btoaFn = (globalThis as { btoa?: (data: string) => string }).btoa; + if (typeof btoaFn === 'function') { + return btoaFn(binary); + } + throw new Error('Base64 encoding is not supported in this environment'); } export function requiresDuplex(body: unknown): boolean { - if (!body || typeof body !== 'object') { - return false; - } - - if (typeof (body as { [Symbol.asyncIterator]?: unknown })[Symbol.asyncIterator] === 'function') { - return true; - } - - const readableStreamCtor = ( - globalThis as { - ReadableStream?: new (...args: unknown[]) => unknown; - } - ).ReadableStream; - if (readableStreamCtor && body instanceof readableStreamCtor) { - return true; - } - - return false; + if (!body || typeof body !== 'object') { + return false; + } + + if ( + typeof (body as { [Symbol.asyncIterator]?: unknown })[ + Symbol.asyncIterator + ] === 'function' + ) { + return true; + } + + const readableStreamCtor = ( + globalThis as { + ReadableStream?: new (...args: unknown[]) => unknown; + } + ).ReadableStream; + if (readableStreamCtor && body instanceof readableStreamCtor) { + return true; + } + + return false; } export function toRequestBody(iterable: AsyncIterable): unknown { - const readableStreamCtor = ( - globalThis as { ReadableStream?: new (underlyingSource: unknown) => unknown } - ).ReadableStream; - if (typeof readableStreamCtor === 'function') { - const iterator = iterable[Symbol.asyncIterator](); - return new readableStreamCtor({ - async pull(controller: { 
enqueue(chunk: Uint8Array): void; close(): void }) { - const { value, done } = await iterator.next(); - if (done) { - controller.close(); - return; - } - controller.enqueue(value!); - }, - async cancel(reason: unknown) { - if (typeof iterator.return === 'function') { - await iterator.return(reason); - } - }, - }); - } - return iterable; + const readableStreamCtor = ( + globalThis as { + ReadableStream?: new (underlyingSource: unknown) => unknown; + } + ).ReadableStream; + if (typeof readableStreamCtor === 'function') { + const iterator = iterable[Symbol.asyncIterator](); + return new readableStreamCtor({ + async pull(controller: { + enqueue(chunk: Uint8Array): void; + close(): void; + }) { + const { value, done } = await iterator.next(); + if (done) { + controller.close(); + return; + } + controller.enqueue(value!); + }, + async cancel(reason: unknown) { + if (typeof iterator.return === 'function') { + await iterator.return(reason); + } + }, + }); + } + return iterable; } -async function* readReadableStream(stream: ReadableStreamLike): AsyncIterable { - const reader = stream.getReader(); - try { - while (true) { - const { value, done } = await reader.read(); - if (done) { - break; - } - if (value !== undefined) { - yield ensureUint8Array(value); - } - } - } finally { - reader.releaseLock?.(); - } +async function* readReadableStream( + stream: ReadableStreamLike +): AsyncIterable { + const reader = stream.getReader(); + try { + while (true) { + const { value, done } = await reader.read(); + if (done) { + break; + } + if (value !== undefined) { + yield ensureUint8Array(value); + } + } + } finally { + reader.releaseLock?.(); + } } function ensureUint8Array(value: unknown): Uint8Array { - if (value instanceof Uint8Array) { - return value; - } - if (value instanceof ArrayBuffer) { - return new Uint8Array(value); - } - if (ArrayBuffer.isView(value)) { - return new Uint8Array(value.buffer, value.byteOffset, value.byteLength); - } - if (typeof value === 'string') { - 
return new TextEncoder().encode(value); - } - if (BufferCtor && BufferCtor.isBuffer(value)) { - return value as Uint8Array; - } - throw new Error('Unsupported chunk type; expected binary data'); + if (value instanceof Uint8Array) { + return value; + } + if (value instanceof ArrayBuffer) { + return new Uint8Array(value); + } + if (ArrayBuffer.isView(value)) { + return new Uint8Array(value.buffer, value.byteOffset, value.byteLength); + } + if (typeof value === 'string') { + return new TextEncoder().encode(value); + } + if (BufferCtor && BufferCtor.isBuffer(value)) { + return value as Uint8Array; + } + throw new Error('Unsupported chunk type; expected binary data'); } function isBlobLike(value: unknown): value is BlobLike { - return ( - typeof value === 'object' && value !== null && typeof (value as BlobLike).stream === 'function' - ); + return ( + typeof value === 'object' && + value !== null && + typeof (value as BlobLike).stream === 'function' + ); } -function isReadableStreamLike(value: unknown): value is ReadableStreamLike { - return ( - typeof value === 'object' && - value !== null && - typeof (value as ReadableStreamLike).getReader === 'function' - ); +function isReadableStreamLike( + value: unknown +): value is ReadableStreamLike { + return ( + typeof value === 'object' && + value !== null && + typeof (value as ReadableStreamLike).getReader === 'function' + ); } function isAsyncIterable(value: unknown): value is AsyncIterable { - return ( - typeof value === 'object' && - value !== null && - Symbol.asyncIterator in (value as Record) - ); + return ( + typeof value === 'object' && + value !== null && + Symbol.asyncIterator in (value as Record) + ); } function isIterable(value: unknown): value is Iterable { - return ( - typeof value === 'object' && - value !== null && - Symbol.iterator in (value as Record) - ); + return ( + typeof value === 'object' && + value !== null && + Symbol.iterator in (value as Record) + ); } function concatChunks(a: Uint8Array, b: 
Uint8Array): Uint8Array { - if (a.byteLength === 0) { - return b; - } - if (b.byteLength === 0) { - return a; - } - const merged = new Uint8Array(a.byteLength + b.byteLength); - merged.set(a, 0); - merged.set(b, a.byteLength); - return merged; + if (a.byteLength === 0) { + return b; + } + if (b.byteLength === 0) { + return a; + } + const merged = new Uint8Array(a.byteLength + b.byteLength); + merged.set(a, 0); + merged.set(b, a.byteLength); + return merged; } diff --git a/packages/git-storage-sdk-node/src/types.ts b/packages/git-storage-sdk-node/src/types.ts index 3e28743fd..cb9672a02 100644 --- a/packages/git-storage-sdk-node/src/types.ts +++ b/packages/git-storage-sdk-node/src/types.ts @@ -1,263 +1,264 @@ /** * Type definitions for Pierre Git Storage SDK */ - import type { - CreateBranchResponseRaw, - GetBranchDiffResponseRaw, - GetCommitDiffResponseRaw, - ListBranchesResponseRaw, - ListCommitsResponseRaw, - ListFilesResponseRaw, - ListReposResponseRaw, - NoteReadResponseRaw, - NoteWriteResponseRaw, - RawBranchInfo as SchemaRawBranchInfo, - RawCommitInfo as SchemaRawCommitInfo, - RawFileDiff as SchemaRawFileDiff, - RawFilteredFile as SchemaRawFilteredFile, - RawRepoBaseInfo as SchemaRawRepoBaseInfo, - RawRepoInfo as SchemaRawRepoInfo, + CreateBranchResponseRaw, + GetBranchDiffResponseRaw, + GetCommitDiffResponseRaw, + ListBranchesResponseRaw, + ListCommitsResponseRaw, + ListFilesResponseRaw, + ListReposResponseRaw, + NoteReadResponseRaw, + NoteWriteResponseRaw, + RawBranchInfo as SchemaRawBranchInfo, + RawCommitInfo as SchemaRawCommitInfo, + RawFileDiff as SchemaRawFileDiff, + RawFilteredFile as SchemaRawFilteredFile, + RawRepoBaseInfo as SchemaRawRepoBaseInfo, + RawRepoInfo as SchemaRawRepoInfo, } from './schemas'; export interface OverrideableGitStorageOptions { - apiBaseUrl?: string; - storageBaseUrl?: string; - apiVersion?: ValidAPIVersion; - defaultTTL?: number; + apiBaseUrl?: string; + storageBaseUrl?: string; + apiVersion?: ValidAPIVersion; + defaultTTL?: 
number; } export interface GitStorageOptions extends OverrideableGitStorageOptions { - key: string; - name: string; - defaultTTL?: number; + key: string; + name: string; + defaultTTL?: number; } export type ValidAPIVersion = 1; export interface GetRemoteURLOptions { - permissions?: ('git:write' | 'git:read' | 'repo:write' | 'org:read')[]; - ttl?: number; + permissions?: ('git:write' | 'git:read' | 'repo:write' | 'org:read')[]; + ttl?: number; } export interface Repo { - id: string; - defaultBranch: string; - getRemoteURL(options?: GetRemoteURLOptions): Promise; - getEphemeralRemoteURL(options?: GetRemoteURLOptions): Promise; - - getFileStream(options: GetFileOptions): Promise; - listFiles(options?: ListFilesOptions): Promise; - listBranches(options?: ListBranchesOptions): Promise; - listCommits(options?: ListCommitsOptions): Promise; - getNote(options: GetNoteOptions): Promise; - createNote(options: CreateNoteOptions): Promise; - appendNote(options: AppendNoteOptions): Promise; - deleteNote(options: DeleteNoteOptions): Promise; - getBranchDiff(options: GetBranchDiffOptions): Promise; - getCommitDiff(options: GetCommitDiffOptions): Promise; - grep(options: GrepOptions): Promise; - pullUpstream(options?: PullUpstreamOptions): Promise; - restoreCommit(options: RestoreCommitOptions): Promise; - createBranch(options: CreateBranchOptions): Promise; - createCommit(options: CreateCommitOptions): CommitBuilder; - createCommitFromDiff(options: CreateCommitFromDiffOptions): Promise; + id: string; + defaultBranch: string; + getRemoteURL(options?: GetRemoteURLOptions): Promise; + getEphemeralRemoteURL(options?: GetRemoteURLOptions): Promise; + + getFileStream(options: GetFileOptions): Promise; + listFiles(options?: ListFilesOptions): Promise; + listBranches(options?: ListBranchesOptions): Promise; + listCommits(options?: ListCommitsOptions): Promise; + getNote(options: GetNoteOptions): Promise; + createNote(options: CreateNoteOptions): Promise; + appendNote(options: 
AppendNoteOptions): Promise; + deleteNote(options: DeleteNoteOptions): Promise; + getBranchDiff(options: GetBranchDiffOptions): Promise; + getCommitDiff(options: GetCommitDiffOptions): Promise; + grep(options: GrepOptions): Promise; + pullUpstream(options?: PullUpstreamOptions): Promise; + restoreCommit(options: RestoreCommitOptions): Promise; + createBranch(options: CreateBranchOptions): Promise; + createCommit(options: CreateCommitOptions): CommitBuilder; + createCommitFromDiff( + options: CreateCommitFromDiffOptions + ): Promise; } export type ValidMethod = 'GET' | 'POST' | 'PUT' | 'DELETE'; type SimplePath = string; type ComplexPath = { - path: string; - params?: Record; - body?: Record; + path: string; + params?: Record; + body?: Record; }; export type ValidPath = SimplePath | ComplexPath; interface GitStorageInvocationOptions { - ttl?: number; + ttl?: number; } export interface FindOneOptions { - id: string; + id: string; } export type SupportedRepoProvider = 'github'; export interface GitHubBaseRepo { - /** - * @default github - */ - provider?: SupportedRepoProvider; - owner: string; - name: string; - defaultBranch?: string; + /** + * @default github + */ + provider?: SupportedRepoProvider; + owner: string; + name: string; + defaultBranch?: string; } export interface ForkBaseRepo { - id: string; - ref?: string; - sha?: string; + id: string; + ref?: string; + sha?: string; } export type BaseRepo = GitHubBaseRepo | ForkBaseRepo; export interface ListReposOptions extends GitStorageInvocationOptions { - cursor?: string; - limit?: number; + cursor?: string; + limit?: number; } export type RawRepoBaseInfo = SchemaRawRepoBaseInfo; export interface RepoBaseInfo { - provider: string; - owner: string; - name: string; + provider: string; + owner: string; + name: string; } export type RawRepoInfo = SchemaRawRepoInfo; export interface RepoInfo { - repoId: string; - url: string; - defaultBranch: string; - createdAt: string; - baseRepo?: RepoBaseInfo; + repoId: string; + 
url: string; + defaultBranch: string; + createdAt: string; + baseRepo?: RepoBaseInfo; } export type ListReposResponse = ListReposResponseRaw; export interface ListReposResult { - repos: RepoInfo[]; - nextCursor?: string; - hasMore: boolean; + repos: RepoInfo[]; + nextCursor?: string; + hasMore: boolean; } export interface CreateRepoOptions extends GitStorageInvocationOptions { - id?: string; - baseRepo?: BaseRepo; - defaultBranch?: string; + id?: string; + baseRepo?: BaseRepo; + defaultBranch?: string; } export interface DeleteRepoOptions extends GitStorageInvocationOptions { - id: string; + id: string; } export interface DeleteRepoResult { - repoId: string; - message: string; + repoId: string; + message: string; } // Get File API types export interface GetFileOptions extends GitStorageInvocationOptions { - path: string; - ref?: string; - ephemeral?: boolean; - ephemeralBase?: boolean; + path: string; + ref?: string; + ephemeral?: boolean; + ephemeralBase?: boolean; } export interface PullUpstreamOptions extends GitStorageInvocationOptions { - ref?: string; + ref?: string; } // List Files API types export interface ListFilesOptions extends GitStorageInvocationOptions { - ref?: string; - ephemeral?: boolean; + ref?: string; + ephemeral?: boolean; } export type ListFilesResponse = ListFilesResponseRaw; export interface ListFilesResult { - paths: string[]; - ref: string; + paths: string[]; + ref: string; } // List Branches API types export interface ListBranchesOptions extends GitStorageInvocationOptions { - cursor?: string; - limit?: number; + cursor?: string; + limit?: number; } export type RawBranchInfo = SchemaRawBranchInfo; export interface BranchInfo { - cursor: string; - name: string; - headSha: string; - createdAt: string; + cursor: string; + name: string; + headSha: string; + createdAt: string; } export type ListBranchesResponse = ListBranchesResponseRaw; export interface ListBranchesResult { - branches: BranchInfo[]; - nextCursor?: string; - hasMore: 
boolean; + branches: BranchInfo[]; + nextCursor?: string; + hasMore: boolean; } // Create Branch API types export interface CreateBranchOptions extends GitStorageInvocationOptions { - baseBranch: string; - targetBranch: string; - baseIsEphemeral?: boolean; - targetIsEphemeral?: boolean; + baseBranch: string; + targetBranch: string; + baseIsEphemeral?: boolean; + targetIsEphemeral?: boolean; } export type CreateBranchResponse = CreateBranchResponseRaw; export interface CreateBranchResult { - message: string; - targetBranch: string; - targetIsEphemeral: boolean; - commitSha?: string; + message: string; + targetBranch: string; + targetIsEphemeral: boolean; + commitSha?: string; } // List Commits API types export interface ListCommitsOptions extends GitStorageInvocationOptions { - branch?: string; - cursor?: string; - limit?: number; + branch?: string; + cursor?: string; + limit?: number; } export type RawCommitInfo = SchemaRawCommitInfo; export interface CommitInfo { - sha: string; - message: string; - authorName: string; - authorEmail: string; - committerName: string; - committerEmail: string; - date: Date; - rawDate: string; + sha: string; + message: string; + authorName: string; + authorEmail: string; + committerName: string; + committerEmail: string; + date: Date; + rawDate: string; } export type ListCommitsResponse = ListCommitsResponseRaw; export interface ListCommitsResult { - commits: CommitInfo[]; - nextCursor?: string; - hasMore: boolean; + commits: CommitInfo[]; + nextCursor?: string; + hasMore: boolean; } // Git notes API types export interface GetNoteOptions extends GitStorageInvocationOptions { - sha: string; + sha: string; } export type GetNoteResponse = NoteReadResponseRaw; export interface GetNoteResult { - sha: string; - note: string; - refSha: string; + sha: string; + note: string; + refSha: string; } interface NoteWriteBaseOptions extends GitStorageInvocationOptions { - sha: string; - note: string; - expectedRefSha?: string; - author?: 
CommitSignature; + sha: string; + note: string; + expectedRefSha?: string; + author?: CommitSignature; } export type CreateNoteOptions = NoteWriteBaseOptions; @@ -265,126 +266,126 @@ export type CreateNoteOptions = NoteWriteBaseOptions; export type AppendNoteOptions = NoteWriteBaseOptions; export interface DeleteNoteOptions extends GitStorageInvocationOptions { - sha: string; - expectedRefSha?: string; - author?: CommitSignature; + sha: string; + expectedRefSha?: string; + author?: CommitSignature; } export interface NoteWriteResultPayload { - success: boolean; - status: string; - message?: string; + success: boolean; + status: string; + message?: string; } export type NoteWriteResponse = NoteWriteResponseRaw; export interface NoteWriteResult { - sha: string; - targetRef: string; - baseCommit?: string; - newRefSha: string; - result: NoteWriteResultPayload; + sha: string; + targetRef: string; + baseCommit?: string; + newRefSha: string; + result: NoteWriteResultPayload; } // Branch Diff API types export interface GetBranchDiffOptions extends GitStorageInvocationOptions { - branch: string; - base?: string; - ephemeral?: boolean; - ephemeralBase?: boolean; - /** Optional paths to filter the diff to specific files */ - paths?: string[]; + branch: string; + base?: string; + ephemeral?: boolean; + ephemeralBase?: boolean; + /** Optional paths to filter the diff to specific files */ + paths?: string[]; } export type GetBranchDiffResponse = GetBranchDiffResponseRaw; export interface GetBranchDiffResult { - branch: string; - base: string; - stats: DiffStats; - files: FileDiff[]; - filteredFiles: FilteredFile[]; + branch: string; + base: string; + stats: DiffStats; + files: FileDiff[]; + filteredFiles: FilteredFile[]; } // Commit Diff API types export interface GetCommitDiffOptions extends GitStorageInvocationOptions { - sha: string; - baseSha?: string; - /** Optional paths to filter the diff to specific files */ - paths?: string[]; + sha: string; + baseSha?: string; + /** 
Optional paths to filter the diff to specific files */ + paths?: string[]; } export type GetCommitDiffResponse = GetCommitDiffResponseRaw; export interface GetCommitDiffResult { - sha: string; - stats: DiffStats; - files: FileDiff[]; - filteredFiles: FilteredFile[]; + sha: string; + stats: DiffStats; + files: FileDiff[]; + filteredFiles: FilteredFile[]; } // Grep API types export interface GrepOptions extends GitStorageInvocationOptions { - ref?: string; - paths?: string[]; - query: { - pattern: string; - /** - * Default is case-sensitive. - * When omitted, the server default is used. - */ - caseSensitive?: boolean; - }; - fileFilters?: { - includeGlobs?: string[]; - excludeGlobs?: string[]; - extensionFilters?: string[]; - }; - context?: { - before?: number; - after?: number; - }; - limits?: { - maxLines?: number; - maxMatchesPerFile?: number; - }; - pagination?: { - cursor?: string; - limit?: number; - }; + ref?: string; + paths?: string[]; + query: { + pattern: string; + /** + * Default is case-sensitive. + * When omitted, the server default is used. 
+ */ + caseSensitive?: boolean; + }; + fileFilters?: { + includeGlobs?: string[]; + excludeGlobs?: string[]; + extensionFilters?: string[]; + }; + context?: { + before?: number; + after?: number; + }; + limits?: { + maxLines?: number; + maxMatchesPerFile?: number; + }; + pagination?: { + cursor?: string; + limit?: number; + }; } export interface GrepLine { - lineNumber: number; - text: string; - type: string; + lineNumber: number; + text: string; + type: string; } export interface GrepFileMatch { - path: string; - lines: GrepLine[]; + path: string; + lines: GrepLine[]; } export interface GrepResult { - query: { - pattern: string; - caseSensitive: boolean; - }; - repo: { - ref: string; - commit: string; - }; - matches: GrepFileMatch[]; - nextCursor?: string; - hasMore: boolean; + query: { + pattern: string; + caseSensitive: boolean; + }; + repo: { + ref: string; + commit: string; + }; + matches: GrepFileMatch[]; + nextCursor?: string; + hasMore: boolean; } // Shared diff types export interface DiffStats { - files: number; - additions: number; - deletions: number; - changes: number; + files: number; + additions: number; + deletions: number; + changes: number; } export type RawFileDiff = SchemaRawFileDiff; @@ -392,243 +393,254 @@ export type RawFileDiff = SchemaRawFileDiff; export type RawFilteredFile = SchemaRawFilteredFile; export type DiffFileState = - | 'added' - | 'modified' - | 'deleted' - | 'renamed' - | 'copied' - | 'type_changed' - | 'unmerged' - | 'unknown'; + | 'added' + | 'modified' + | 'deleted' + | 'renamed' + | 'copied' + | 'type_changed' + | 'unmerged' + | 'unknown'; export interface DiffFileBase { - path: string; - state: DiffFileState; - rawState: string; - oldPath?: string; - bytes: number; - isEof: boolean; + path: string; + state: DiffFileState; + rawState: string; + oldPath?: string; + bytes: number; + isEof: boolean; } export interface FileDiff extends DiffFileBase { - raw: string; + raw: string; } export interface FilteredFile extends 
DiffFileBase {} interface CreateCommitBaseOptions extends GitStorageInvocationOptions { - commitMessage: string; - expectedHeadSha?: string; - baseBranch?: string; - ephemeral?: boolean; - ephemeralBase?: boolean; - author: CommitSignature; - committer?: CommitSignature; - signal?: AbortSignal; + commitMessage: string; + expectedHeadSha?: string; + baseBranch?: string; + ephemeral?: boolean; + ephemeralBase?: boolean; + author: CommitSignature; + committer?: CommitSignature; + signal?: AbortSignal; } export interface CreateCommitBranchOptions extends CreateCommitBaseOptions { - targetBranch: string; - targetRef?: never; + targetBranch: string; + targetRef?: never; } /** * @deprecated Use {@link CreateCommitBranchOptions} instead. */ export interface LegacyCreateCommitOptions extends CreateCommitBaseOptions { - targetBranch?: never; - targetRef: string; + targetBranch?: never; + targetRef: string; } -export type CreateCommitOptions = CreateCommitBranchOptions | LegacyCreateCommitOptions; +export type CreateCommitOptions = + | CreateCommitBranchOptions + | LegacyCreateCommitOptions; export interface CommitSignature { - name: string; - email: string; + name: string; + email: string; } export interface ReadableStreamReaderLike { - read(): Promise<{ value?: T; done: boolean }>; - releaseLock?(): void; + read(): Promise<{ value?: T; done: boolean }>; + releaseLock?(): void; } export interface ReadableStreamLike { - getReader(): ReadableStreamReaderLike; + getReader(): ReadableStreamReaderLike; } export interface BlobLike { - stream(): unknown; + stream(): unknown; } export interface FileLike extends BlobLike { - name: string; - lastModified?: number; + name: string; + lastModified?: number; } export type GitFileMode = '100644' | '100755' | '120000' | '160000'; export type TextEncoding = - | 'ascii' - | 'utf8' - | 'utf-8' - | 'utf16le' - | 'utf-16le' - | 'ucs2' - | 'ucs-2' - | 'base64' - | 'base64url' - | 'latin1' - | 'binary' - | 'hex'; + | 'ascii' + | 'utf8' + | 'utf-8' 
+ | 'utf16le' + | 'utf-16le' + | 'ucs2' + | 'ucs-2' + | 'base64' + | 'base64url' + | 'latin1' + | 'binary' + | 'hex'; export type CommitFileSource = - | string - | Uint8Array - | ArrayBuffer - | BlobLike - | FileLike - | ReadableStreamLike - | AsyncIterable - | Iterable; + | string + | Uint8Array + | ArrayBuffer + | BlobLike + | FileLike + | ReadableStreamLike + | AsyncIterable + | Iterable; export interface CommitFileOptions { - mode?: GitFileMode; + mode?: GitFileMode; } export interface CommitTextFileOptions extends CommitFileOptions { - encoding?: TextEncoding; + encoding?: TextEncoding; } export interface CommitBuilder { - addFile(path: string, source: CommitFileSource, options?: CommitFileOptions): CommitBuilder; - addFileFromString(path: string, contents: string, options?: CommitTextFileOptions): CommitBuilder; - deletePath(path: string): CommitBuilder; - send(): Promise; + addFile( + path: string, + source: CommitFileSource, + options?: CommitFileOptions + ): CommitBuilder; + addFileFromString( + path: string, + contents: string, + options?: CommitTextFileOptions + ): CommitBuilder; + deletePath(path: string): CommitBuilder; + send(): Promise; } export type DiffSource = CommitFileSource; -export interface CreateCommitFromDiffOptions extends GitStorageInvocationOptions { - targetBranch: string; - commitMessage: string; - diff: DiffSource; - expectedHeadSha?: string; - baseBranch?: string; - ephemeral?: boolean; - ephemeralBase?: boolean; - author: CommitSignature; - committer?: CommitSignature; - signal?: AbortSignal; +export interface CreateCommitFromDiffOptions + extends GitStorageInvocationOptions { + targetBranch: string; + commitMessage: string; + diff: DiffSource; + expectedHeadSha?: string; + baseBranch?: string; + ephemeral?: boolean; + ephemeralBase?: boolean; + author: CommitSignature; + committer?: CommitSignature; + signal?: AbortSignal; } export interface RefUpdate { - branch: string; - oldSha: string; - newSha: string; + branch: string; + 
oldSha: string; + newSha: string; } export type RefUpdateReason = - | 'precondition_failed' - | 'conflict' - | 'not_found' - | 'invalid' - | 'timeout' - | 'unauthorized' - | 'forbidden' - | 'unavailable' - | 'internal' - | 'failed' - | 'unknown'; + | 'precondition_failed' + | 'conflict' + | 'not_found' + | 'invalid' + | 'timeout' + | 'unauthorized' + | 'forbidden' + | 'unavailable' + | 'internal' + | 'failed' + | 'unknown'; export interface CommitResult { - commitSha: string; - treeSha: string; - targetBranch: string; - packBytes: number; - blobCount: number; - refUpdate: RefUpdate; + commitSha: string; + treeSha: string; + targetBranch: string; + packBytes: number; + blobCount: number; + refUpdate: RefUpdate; } export interface RestoreCommitOptions extends GitStorageInvocationOptions { - targetBranch: string; - targetCommitSha: string; - commitMessage?: string; - expectedHeadSha?: string; - author: CommitSignature; - committer?: CommitSignature; + targetBranch: string; + targetCommitSha: string; + commitMessage?: string; + expectedHeadSha?: string; + author: CommitSignature; + committer?: CommitSignature; } export interface RestoreCommitResult { - commitSha: string; - treeSha: string; - targetBranch: string; - packBytes: number; - refUpdate: RefUpdate; + commitSha: string; + treeSha: string; + targetBranch: string; + packBytes: number; + refUpdate: RefUpdate; } // Webhook types export interface WebhookValidationOptions { - /** - * Maximum age of webhook in seconds (default: 300 seconds / 5 minutes) - * Set to 0 to disable timestamp validation - */ - maxAgeSeconds?: number; + /** + * Maximum age of webhook in seconds (default: 300 seconds / 5 minutes) + * Set to 0 to disable timestamp validation + */ + maxAgeSeconds?: number; } export interface WebhookValidationResult { - /** - * Whether the webhook signature and timestamp are valid - */ - valid: boolean; - /** - * Error message if validation failed - */ - error?: string; - /** - * The parsed webhook event type 
(e.g., "push") - */ - eventType?: string; - /** - * The timestamp from the signature (Unix seconds) - */ - timestamp?: number; + /** + * Whether the webhook signature and timestamp are valid + */ + valid: boolean; + /** + * Error message if validation failed + */ + error?: string; + /** + * The parsed webhook event type (e.g., "push") + */ + eventType?: string; + /** + * The timestamp from the signature (Unix seconds) + */ + timestamp?: number; } // Webhook event payloads export interface RawWebhookPushEvent { - repository: { - id: string; - url: string; - }; - ref: string; - before: string; - after: string; - customer_id: string; - pushed_at: string; // RFC3339 timestamp + repository: { + id: string; + url: string; + }; + ref: string; + before: string; + after: string; + customer_id: string; + pushed_at: string; // RFC3339 timestamp } export interface WebhookPushEvent { - type: 'push'; - repository: { - id: string; - url: string; - }; - ref: string; - before: string; - after: string; - customerId: string; - pushedAt: Date; - rawPushedAt: string; + type: 'push'; + repository: { + id: string; + url: string; + }; + ref: string; + before: string; + after: string; + customerId: string; + pushedAt: Date; + rawPushedAt: string; } export interface WebhookUnknownEvent { - type: string; - raw: unknown; + type: string; + raw: unknown; } export type WebhookEventPayload = WebhookPushEvent | WebhookUnknownEvent; export interface ParsedWebhookSignature { - timestamp: string; - signature: string; + timestamp: string; + signature: string; } diff --git a/packages/git-storage-sdk-node/src/util.ts b/packages/git-storage-sdk-node/src/util.ts index 0586ba11f..55aa32597 100644 --- a/packages/git-storage-sdk-node/src/util.ts +++ b/packages/git-storage-sdk-node/src/util.ts @@ -1,44 +1,51 @@ -export function timingSafeEqual(a: string | Uint8Array, b: string | Uint8Array): boolean { - const bufferA = typeof a === 'string' ? 
new TextEncoder().encode(a) : a; - const bufferB = typeof b === 'string' ? new TextEncoder().encode(b) : b; +export function timingSafeEqual( + a: string | Uint8Array, + b: string | Uint8Array +): boolean { + const bufferA = typeof a === 'string' ? new TextEncoder().encode(a) : a; + const bufferB = typeof b === 'string' ? new TextEncoder().encode(b) : b; - if (bufferA.length !== bufferB.length) return false; + if (bufferA.length !== bufferB.length) return false; - let result = 0; - for (let i = 0; i < bufferA.length; i++) { - result |= bufferA[i] ^ bufferB[i]; - } - return result === 0; + let result = 0; + for (let i = 0; i < bufferA.length; i++) { + result |= bufferA[i] ^ bufferB[i]; + } + return result === 0; } export async function getEnvironmentCrypto() { - if (!globalThis.crypto) { - const { webcrypto } = await import('node:crypto'); - return webcrypto; - } - return globalThis.crypto; + if (!globalThis.crypto) { + const { webcrypto } = await import('node:crypto'); + return webcrypto; + } + return globalThis.crypto; } -export async function createHmac(algorithm: string, secret: string, data: string): Promise { - if (algorithm !== 'sha256') { - throw new Error('Only sha256 algorithm is supported'); - } - if (!secret || secret.length === 0) { - throw new Error('Secret is required'); - } +export async function createHmac( + algorithm: string, + secret: string, + data: string +): Promise { + if (algorithm !== 'sha256') { + throw new Error('Only sha256 algorithm is supported'); + } + if (!secret || secret.length === 0) { + throw new Error('Secret is required'); + } - const crypto = await getEnvironmentCrypto(); - const encoder = new TextEncoder(); - const key = await crypto.subtle.importKey( - 'raw', - encoder.encode(secret), - { name: 'HMAC', hash: 'SHA-256' }, - false, - ['sign'], - ); + const crypto = await getEnvironmentCrypto(); + const encoder = new TextEncoder(); + const key = await crypto.subtle.importKey( + 'raw', + encoder.encode(secret), + { name: 'HMAC', 
hash: 'SHA-256' }, + false, + ['sign'] + ); - const signature = await crypto.subtle.sign('HMAC', key, encoder.encode(data)); - return Array.from(new Uint8Array(signature)) - .map((b) => b.toString(16).padStart(2, '0')) - .join(''); + const signature = await crypto.subtle.sign('HMAC', key, encoder.encode(data)); + return Array.from(new Uint8Array(signature)) + .map((b) => b.toString(16).padStart(2, '0')) + .join(''); } diff --git a/packages/git-storage-sdk-node/src/version.ts b/packages/git-storage-sdk-node/src/version.ts index 959ccdc45..1c9208f9e 100644 --- a/packages/git-storage-sdk-node/src/version.ts +++ b/packages/git-storage-sdk-node/src/version.ts @@ -4,5 +4,5 @@ export const PACKAGE_NAME = 'code-storage-sdk'; export const PACKAGE_VERSION = packageJson.version; export function getUserAgent(): string { - return `${PACKAGE_NAME}/${PACKAGE_VERSION}`; + return `${PACKAGE_NAME}/${PACKAGE_VERSION}`; } diff --git a/packages/git-storage-sdk-node/src/webhook.ts b/packages/git-storage-sdk-node/src/webhook.ts index 1c9a99493..7442823dc 100644 --- a/packages/git-storage-sdk-node/src/webhook.ts +++ b/packages/git-storage-sdk-node/src/webhook.ts @@ -1,16 +1,14 @@ /** * Webhook validation utilities for Pierre Git Storage */ - import type { - ParsedWebhookSignature, - RawWebhookPushEvent, - WebhookEventPayload, - WebhookPushEvent, - WebhookValidationOptions, - WebhookValidationResult, + ParsedWebhookSignature, + RawWebhookPushEvent, + WebhookEventPayload, + WebhookPushEvent, + WebhookValidationOptions, + WebhookValidationResult, } from './types'; - import { createHmac, timingSafeEqual } from './util'; const DEFAULT_MAX_AGE_SECONDS = 300; // 5 minutes @@ -19,39 +17,41 @@ const DEFAULT_MAX_AGE_SECONDS = 300; // 5 minutes * Parse the X-Pierre-Signature header * Format: t=,sha256= */ -export function parseSignatureHeader(header: string): ParsedWebhookSignature | null { - if (!header || typeof header !== 'string') { - return null; - } - - let timestamp = ''; - let signature = 
''; - - // Split by comma and parse each element - const elements = header.split(','); - for (const element of elements) { - const trimmedElement = element.trim(); - const parts = trimmedElement.split('=', 2); - if (parts.length !== 2) { - continue; - } - - const [key, value] = parts; - switch (key) { - case 't': - timestamp = value; - break; - case 'sha256': - signature = value; - break; - } - } - - if (!timestamp || !signature) { - return null; - } - - return { timestamp, signature }; +export function parseSignatureHeader( + header: string +): ParsedWebhookSignature | null { + if (!header || typeof header !== 'string') { + return null; + } + + let timestamp = ''; + let signature = ''; + + // Split by comma and parse each element + const elements = header.split(','); + for (const element of elements) { + const trimmedElement = element.trim(); + const parts = trimmedElement.split('=', 2); + if (parts.length !== 2) { + continue; + } + + const [key, value] = parts; + switch (key) { + case 't': + timestamp = value; + break; + case 'sha256': + signature = value; + break; + } + } + + if (!timestamp || !signature) { + return null; + } + + return { timestamp, signature }; } /** @@ -78,94 +78,95 @@ export function parseSignatureHeader(header: string): ParsedWebhookSignature | n * ``` */ export async function validateWebhookSignature( - payload: string | Buffer, - signatureHeader: string, - secret: string, - options: WebhookValidationOptions = {}, + payload: string | Buffer, + signatureHeader: string, + secret: string, + options: WebhookValidationOptions = {} ): Promise { - if (!secret || secret.length === 0) { - return { - valid: false, - error: 'Empty secret is not allowed', - }; - } - - // Parse the signature header - const parsed = parseSignatureHeader(signatureHeader); - if (!parsed) { - return { - valid: false, - error: 'Invalid signature header format', - }; - } - - // Parse timestamp - const timestamp = Number.parseInt(parsed.timestamp, 10); - if (isNaN(timestamp)) 
{ - return { - valid: false, - error: 'Invalid timestamp in signature', - }; - } - - // Validate timestamp age (prevent replay attacks) - const maxAge = options.maxAgeSeconds ?? DEFAULT_MAX_AGE_SECONDS; - if (maxAge > 0) { - const now = Math.floor(Date.now() / 1000); - const age = now - timestamp; - - if (age > maxAge) { - return { - valid: false, - error: `Webhook timestamp too old (${age} seconds)`, - timestamp, - }; - } - - // Also reject timestamps from the future (clock skew tolerance of 60 seconds) - if (age < -60) { - return { - valid: false, - error: 'Webhook timestamp is in the future', - timestamp, - }; - } - } - - // Convert payload to string if it's a Buffer - const payloadStr = typeof payload === 'string' ? payload : payload.toString('utf8'); - - // Compute expected signature - // Format: HMAC-SHA256(secret, timestamp + "." + payload) - const signedData = `${parsed.timestamp}.${payloadStr}`; - const expectedSignature = await createHmac('sha256', secret, signedData); - - // Compare signatures using constant-time comparison - const expectedBuffer = Buffer.from(expectedSignature); - const actualBuffer = Buffer.from(parsed.signature); - - // Ensure both buffers are the same length for timing-safe comparison - if (expectedBuffer.length !== actualBuffer.length) { - return { - valid: false, - error: 'Invalid signature', - timestamp, - }; - } - - const signaturesMatch = timingSafeEqual(expectedBuffer, actualBuffer); - if (!signaturesMatch) { - return { - valid: false, - error: 'Invalid signature', - timestamp, - }; - } - - return { - valid: true, - timestamp, - }; + if (!secret || secret.length === 0) { + return { + valid: false, + error: 'Empty secret is not allowed', + }; + } + + // Parse the signature header + const parsed = parseSignatureHeader(signatureHeader); + if (!parsed) { + return { + valid: false, + error: 'Invalid signature header format', + }; + } + + // Parse timestamp + const timestamp = Number.parseInt(parsed.timestamp, 10); + if 
(isNaN(timestamp)) { + return { + valid: false, + error: 'Invalid timestamp in signature', + }; + } + + // Validate timestamp age (prevent replay attacks) + const maxAge = options.maxAgeSeconds ?? DEFAULT_MAX_AGE_SECONDS; + if (maxAge > 0) { + const now = Math.floor(Date.now() / 1000); + const age = now - timestamp; + + if (age > maxAge) { + return { + valid: false, + error: `Webhook timestamp too old (${age} seconds)`, + timestamp, + }; + } + + // Also reject timestamps from the future (clock skew tolerance of 60 seconds) + if (age < -60) { + return { + valid: false, + error: 'Webhook timestamp is in the future', + timestamp, + }; + } + } + + // Convert payload to string if it's a Buffer + const payloadStr = + typeof payload === 'string' ? payload : payload.toString('utf8'); + + // Compute expected signature + // Format: HMAC-SHA256(secret, timestamp + "." + payload) + const signedData = `${parsed.timestamp}.${payloadStr}`; + const expectedSignature = await createHmac('sha256', secret, signedData); + + // Compare signatures using constant-time comparison + const expectedBuffer = Buffer.from(expectedSignature); + const actualBuffer = Buffer.from(parsed.signature); + + // Ensure both buffers are the same length for timing-safe comparison + if (expectedBuffer.length !== actualBuffer.length) { + return { + valid: false, + error: 'Invalid signature', + timestamp, + }; + } + + const signaturesMatch = timingSafeEqual(expectedBuffer, actualBuffer); + if (!signaturesMatch) { + return { + valid: false, + error: 'Invalid signature', + timestamp, + }; + } + + return { + valid: true, + timestamp, + }; } /** @@ -196,128 +197,132 @@ export async function validateWebhookSignature( * ``` */ export async function validateWebhook( - payload: string | Buffer, - headers: Record, - secret: string, - options: WebhookValidationOptions = {}, + payload: string | Buffer, + headers: Record, + secret: string, + options: WebhookValidationOptions = {} ): Promise { - // Get signature header - 
const signatureHeader = headers['x-pierre-signature'] || headers['X-Pierre-Signature']; - if (!signatureHeader || Array.isArray(signatureHeader)) { - return { - valid: false, - error: 'Missing or invalid X-Pierre-Signature header', - }; - } - - // Get event type header - const eventType = headers['x-pierre-event'] || headers['X-Pierre-Event']; - if (!eventType || Array.isArray(eventType)) { - return { - valid: false, - error: 'Missing or invalid X-Pierre-Event header', - }; - } - - // Validate signature - const validationResult = await validateWebhookSignature( - payload, - signatureHeader, - secret, - options, - ); - - if (!validationResult.valid) { - return validationResult; - } - - // Parse payload - const payloadStr = typeof payload === 'string' ? payload : payload.toString('utf8'); - let parsedJson: unknown; - try { - parsedJson = JSON.parse(payloadStr); - } catch { - return { - valid: false, - error: 'Invalid JSON payload', - timestamp: validationResult.timestamp, - }; - } - - const conversion = convertWebhookPayload(String(eventType), parsedJson); - if (!conversion.valid) { - return { - valid: false, - error: conversion.error, - timestamp: validationResult.timestamp, - }; - } - - return { - valid: true, - eventType, - timestamp: validationResult.timestamp, - payload: conversion.payload, - }; + // Get signature header + const signatureHeader = + headers['x-pierre-signature'] || headers['X-Pierre-Signature']; + if (!signatureHeader || Array.isArray(signatureHeader)) { + return { + valid: false, + error: 'Missing or invalid X-Pierre-Signature header', + }; + } + + // Get event type header + const eventType = headers['x-pierre-event'] || headers['X-Pierre-Event']; + if (!eventType || Array.isArray(eventType)) { + return { + valid: false, + error: 'Missing or invalid X-Pierre-Event header', + }; + } + + // Validate signature + const validationResult = await validateWebhookSignature( + payload, + signatureHeader, + secret, + options + ); + + if 
(!validationResult.valid) { + return validationResult; + } + + // Parse payload + const payloadStr = + typeof payload === 'string' ? payload : payload.toString('utf8'); + let parsedJson: unknown; + try { + parsedJson = JSON.parse(payloadStr); + } catch { + return { + valid: false, + error: 'Invalid JSON payload', + timestamp: validationResult.timestamp, + }; + } + + const conversion = convertWebhookPayload(String(eventType), parsedJson); + if (!conversion.valid) { + return { + valid: false, + error: conversion.error, + timestamp: validationResult.timestamp, + }; + } + + return { + valid: true, + eventType, + timestamp: validationResult.timestamp, + payload: conversion.payload, + }; } function convertWebhookPayload( - eventType: string, - raw: unknown, -): { valid: true; payload: WebhookEventPayload } | { valid: false; error: string } { - if (eventType === 'push') { - if (!isRawWebhookPushEvent(raw)) { - return { - valid: false, - error: 'Invalid push payload', - }; - } - return { - valid: true, - payload: transformPushEvent(raw), - }; - } - const fallbackPayload = { type: eventType, raw }; - return { - valid: true, - payload: fallbackPayload, - }; + eventType: string, + raw: unknown +): + | { valid: true; payload: WebhookEventPayload } + | { valid: false; error: string } { + if (eventType === 'push') { + if (!isRawWebhookPushEvent(raw)) { + return { + valid: false, + error: 'Invalid push payload', + }; + } + return { + valid: true, + payload: transformPushEvent(raw), + }; + } + const fallbackPayload = { type: eventType, raw }; + return { + valid: true, + payload: fallbackPayload, + }; } function transformPushEvent(raw: RawWebhookPushEvent): WebhookPushEvent { - return { - type: 'push' as const, - repository: { - id: raw.repository.id, - url: raw.repository.url, - }, - ref: raw.ref, - before: raw.before, - after: raw.after, - customerId: raw.customer_id, - pushedAt: new Date(raw.pushed_at), - rawPushedAt: raw.pushed_at, - }; + return { + type: 'push' as const, + 
repository: { + id: raw.repository.id, + url: raw.repository.url, + }, + ref: raw.ref, + before: raw.before, + after: raw.after, + customerId: raw.customer_id, + pushedAt: new Date(raw.pushed_at), + rawPushedAt: raw.pushed_at, + }; } function isRawWebhookPushEvent(value: unknown): value is RawWebhookPushEvent { - if (!isRecord(value)) { - return false; - } - if (!isRecord(value.repository)) { - return false; - } - return ( - typeof value.repository.id === 'string' && - typeof value.repository.url === 'string' && - typeof value.ref === 'string' && - typeof value.before === 'string' && - typeof value.after === 'string' && - typeof value.customer_id === 'string' && - typeof value.pushed_at === 'string' - ); + if (!isRecord(value)) { + return false; + } + if (!isRecord(value.repository)) { + return false; + } + return ( + typeof value.repository.id === 'string' && + typeof value.repository.url === 'string' && + typeof value.ref === 'string' && + typeof value.before === 'string' && + typeof value.after === 'string' && + typeof value.customer_id === 'string' && + typeof value.pushed_at === 'string' + ); } function isRecord(value: unknown): value is Record { - return typeof value === 'object' && value !== null; + return typeof value === 'object' && value !== null; } diff --git a/packages/git-storage-sdk-node/tests/commit-from-diff.test.ts b/packages/git-storage-sdk-node/tests/commit-from-diff.test.ts index 21a5ff2a1..5582be4a6 100644 --- a/packages/git-storage-sdk-node/tests/commit-from-diff.test.ts +++ b/packages/git-storage-sdk-node/tests/commit-from-diff.test.ts @@ -1,5 +1,6 @@ import { ReadableStream } from 'node:stream/web'; import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; + import { GitStorage, RefUpdateError } from '../src/index'; const key = `-----BEGIN PRIVATE KEY----- @@ -11,333 +12,345 @@ yTh6suablSura7ZDG8hpm3oNsq/ykC3Scfsw6ZTuuVuLlXKV/be/Xr0d type MockFetch = ReturnType; function ensureMockFetch(): MockFetch { - const existing = 
globalThis.fetch as unknown; - if (existing && typeof existing === 'function' && 'mock' in (existing as any)) { - return existing as MockFetch; - } - const mock = vi.fn(); - vi.stubGlobal('fetch', mock); - return mock; + const existing = globalThis.fetch as unknown; + if ( + existing && + typeof existing === 'function' && + 'mock' in (existing as any) + ) { + return existing as MockFetch; + } + const mock = vi.fn(); + vi.stubGlobal('fetch', mock); + return mock; } async function readRequestBody(body: unknown): Promise { - if (!body) { - return ''; - } - if (typeof body === 'string') { - return body; - } - if (body instanceof Uint8Array) { - return new TextDecoder().decode(body); - } - if (isReadableStream(body)) { - const reader = body.getReader(); - const chunks: Uint8Array[] = []; - while (true) { - const { value, done } = await reader.read(); - if (done) { - break; - } - if (value) { - chunks.push(value); - } - } - reader.releaseLock?.(); - return decodeChunks(chunks); - } - if (isAsyncIterable(body)) { - const chunks: Uint8Array[] = []; - for await (const value of body as AsyncIterable) { - if (value instanceof Uint8Array) { - chunks.push(value); - } else if (typeof value === 'string') { - chunks.push(new TextEncoder().encode(value)); - } else if (value instanceof ArrayBuffer) { - chunks.push(new Uint8Array(value)); - } - } - return decodeChunks(chunks); - } - return ''; + if (!body) { + return ''; + } + if (typeof body === 'string') { + return body; + } + if (body instanceof Uint8Array) { + return new TextDecoder().decode(body); + } + if (isReadableStream(body)) { + const reader = body.getReader(); + const chunks: Uint8Array[] = []; + while (true) { + const { value, done } = await reader.read(); + if (done) { + break; + } + if (value) { + chunks.push(value); + } + } + reader.releaseLock?.(); + return decodeChunks(chunks); + } + if (isAsyncIterable(body)) { + const chunks: Uint8Array[] = []; + for await (const value of body as AsyncIterable) { + if (value 
instanceof Uint8Array) { + chunks.push(value); + } else if (typeof value === 'string') { + chunks.push(new TextEncoder().encode(value)); + } else if (value instanceof ArrayBuffer) { + chunks.push(new Uint8Array(value)); + } + } + return decodeChunks(chunks); + } + return ''; } function isReadableStream(value: unknown): value is ReadableStream { - return ( - typeof value === 'object' && - value !== null && - typeof (value as ReadableStream).getReader === 'function' - ); + return ( + typeof value === 'object' && + value !== null && + typeof (value as ReadableStream).getReader === 'function' + ); } function isAsyncIterable(value: unknown): value is AsyncIterable { - return ( - typeof value === 'object' && - value !== null && - Symbol.asyncIterator in (value as Record) - ); + return ( + typeof value === 'object' && + value !== null && + Symbol.asyncIterator in (value as Record) + ); } function decodeChunks(chunks: Uint8Array[]): string { - if (chunks.length === 0) { - return ''; - } - if (chunks.length === 1) { - return new TextDecoder().decode(chunks[0]); - } - const total = chunks.reduce((sum, chunk) => sum + chunk.byteLength, 0); - const combined = new Uint8Array(total); - let offset = 0; - for (const chunk of chunks) { - combined.set(chunk, offset); - offset += chunk.byteLength; - } - return new TextDecoder().decode(combined); + if (chunks.length === 0) { + return ''; + } + if (chunks.length === 1) { + return new TextDecoder().decode(chunks[0]); + } + const total = chunks.reduce((sum, chunk) => sum + chunk.byteLength, 0); + const combined = new Uint8Array(total); + let offset = 0; + for (const chunk of chunks) { + combined.set(chunk, offset); + offset += chunk.byteLength; + } + return new TextDecoder().decode(combined); } describe('createCommitFromDiff', () => { - const mockFetch = ensureMockFetch(); - let randomSpy: ReturnType | undefined; - - beforeEach(() => { - mockFetch.mockReset(); - mockFetch.mockImplementation(() => - Promise.resolve({ - ok: true, - status: 
200, - statusText: 'OK', - json: async () => ({ repo_id: 'repo-id', url: 'https://example.git' }), - }), - ); - randomSpy = vi - .spyOn(globalThis.crypto, 'randomUUID') - .mockImplementation(() => 'cid-fixed' as any); - }); - - afterEach(() => { - randomSpy?.mockRestore(); - }); - - it('streams metadata and diff chunks in NDJSON order', async () => { - const store = new GitStorage({ name: 'v0', key }); - - const commitAck = { - commit: { - commit_sha: 'def456', - tree_sha: 'abc123', - target_branch: 'main', - pack_bytes: 84, - blob_count: 0, - }, - result: { - branch: 'main', - old_sha: '0000000000000000000000000000000000000000', - new_sha: 'def456', - success: true, - status: 'ok', - }, - }; - - // createRepo call - mockFetch.mockImplementationOnce(() => - Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ repo_id: 'repo-main', url: 'https://repo.git' }), - }), - ); - - // diff commit call - mockFetch.mockImplementationOnce(async (url, init) => { - expect(String(url)).toMatch(/\/api\/v1\/repos\/diff-commit$/); - expect(init?.method).toBe('POST'); - const headers = init?.headers as Record; - expect(headers.Authorization).toMatch(/^Bearer\s.+/); - expect(headers['Content-Type']).toBe('application/x-ndjson'); - - const body = await readRequestBody(init?.body); - const lines = body.trim().split('\n'); - expect(lines).toHaveLength(2); - - const metadataFrame = JSON.parse(lines[0]); - expect(metadataFrame.metadata).toEqual({ - target_branch: 'main', - expected_head_sha: 'abc123', - commit_message: 'Apply patch', - author: { - name: 'Author Name', - email: 'author@example.com', - }, - }); - - const chunkFrame = JSON.parse(lines[1]); - expect(chunkFrame.diff_chunk.eof).toBe(true); - const decoded = Buffer.from(chunkFrame.diff_chunk.data, 'base64').toString('utf8'); - expect(decoded).toBe('diff --git a/file.txt b/file.txt\n'); - - return { - ok: true, - status: 200, - json: async () => commitAck, - }; - }); - - const repo = await 
store.createRepo({ id: 'repo-main' }); - const result = await repo.createCommitFromDiff({ - targetBranch: 'main', - commitMessage: 'Apply patch', - expectedHeadSha: 'abc123', - author: { name: 'Author Name', email: 'author@example.com' }, - diff: 'diff --git a/file.txt b/file.txt\n', - }); - - expect(result).toEqual({ - commitSha: 'def456', - treeSha: 'abc123', - targetBranch: 'main', - packBytes: 84, - blobCount: 0, - refUpdate: { - branch: 'main', - oldSha: '0000000000000000000000000000000000000000', - newSha: 'def456', - }, - }); - }); - - it('requires diff content before sending', async () => { - const store = new GitStorage({ name: 'v0', key }); - mockFetch.mockImplementationOnce(() => - Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ repo_id: 'repo-main', url: 'https://repo.git' }), - }), - ); - - const repo = await store.createRepo({ id: 'repo-main' }); - await expect( - repo.createCommitFromDiff({ - targetBranch: 'main', - commitMessage: 'Apply patch', - author: { name: 'Author', email: 'author@example.com' }, - diff: undefined as unknown as string, - }), - ).rejects.toThrow('createCommitFromDiff diff is required'); - }); - - it('converts error responses into RefUpdateError', async () => { - const store = new GitStorage({ name: 'v0', key }); - - mockFetch.mockImplementationOnce(() => - Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ repo_id: 'repo-main', url: 'https://repo.git' }), - }), - ); - - mockFetch.mockImplementationOnce(async () => { - const response = { - ok: false, - status: 409, - statusText: 'Conflict', - async json() { - return { - result: { - status: 'conflict', - message: 'Head moved', - branch: 'main', - old_sha: 'abc', - new_sha: 'def', - }, - }; - }, - async text() { - return ''; - }, - clone() { - return this; - }, - }; - return response; - }); - - const repo = await store.createRepo({ id: 'repo-main' }); - - const promise = repo.createCommitFromDiff({ - 
targetBranch: 'refs/heads/main', - commitMessage: 'Apply patch', - expectedHeadSha: 'abc', - author: { name: 'Author', email: 'author@example.com' }, - diff: 'diff --git a/file.txt b/file.txt\n', - }); - - await expect(promise).rejects.toThrow(RefUpdateError); - - await promise.catch((error) => { - if (!(error instanceof RefUpdateError)) { - throw error; - } - expect(error.status).toBe('conflict'); - expect(error.refUpdate).toEqual({ - branch: 'main', - oldSha: 'abc', - newSha: 'def', - }); - }); - }); - - it('includes Code-Storage-Agent header in diff commit requests', async () => { - const store = new GitStorage({ name: 'v0', key }); - - mockFetch.mockImplementationOnce(() => - Promise.resolve({ - ok: true, - status: 200, - json: async () => ({ repo_id: 'repo-user-agent', url: 'https://repo.git' }), - }), - ); - - const commitAck = { - commit: { - commit_sha: 'useragent123', - tree_sha: 'tree456', - target_branch: 'main', - pack_bytes: 42, - blob_count: 0, - }, - result: { - branch: 'main', - old_sha: '0000000000000000000000000000000000000000', - new_sha: 'useragent123', - success: true, - status: 'ok', - }, - }; - - let capturedHeaders: Record | undefined; - mockFetch.mockImplementationOnce(async (_url, init) => { - capturedHeaders = init?.headers as Record; - return { - ok: true, - status: 200, - json: async () => commitAck, - }; - }); - - const repo = await store.createRepo({ id: 'repo-user-agent' }); - await repo.createCommitFromDiff({ - targetBranch: 'main', - commitMessage: 'Test user agent', - author: { name: 'Author Name', email: 'author@example.com' }, - diff: 'diff --git a/test.txt b/test.txt\n', - }); - - expect(capturedHeaders).toBeDefined(); - expect(capturedHeaders?.['Code-Storage-Agent']).toBeDefined(); - expect(capturedHeaders?.['Code-Storage-Agent']).toMatch(/code-storage-sdk\/\d+\.\d+\.\d+/); - }); + const mockFetch = ensureMockFetch(); + let randomSpy: ReturnType | undefined; + + beforeEach(() => { + mockFetch.mockReset(); + 
mockFetch.mockImplementation(() => + Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'repo-id', url: 'https://example.git' }), + }) + ); + randomSpy = vi + .spyOn(globalThis.crypto, 'randomUUID') + .mockImplementation(() => 'cid-fixed' as any); + }); + + afterEach(() => { + randomSpy?.mockRestore(); + }); + + it('streams metadata and diff chunks in NDJSON order', async () => { + const store = new GitStorage({ name: 'v0', key }); + + const commitAck = { + commit: { + commit_sha: 'def456', + tree_sha: 'abc123', + target_branch: 'main', + pack_bytes: 84, + blob_count: 0, + }, + result: { + branch: 'main', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'def456', + success: true, + status: 'ok', + }, + }; + + // createRepo call + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'repo-main', url: 'https://repo.git' }), + }) + ); + + // diff commit call + mockFetch.mockImplementationOnce(async (url, init) => { + expect(String(url)).toMatch(/\/api\/v1\/repos\/diff-commit$/); + expect(init?.method).toBe('POST'); + const headers = init?.headers as Record; + expect(headers.Authorization).toMatch(/^Bearer\s.+/); + expect(headers['Content-Type']).toBe('application/x-ndjson'); + + const body = await readRequestBody(init?.body); + const lines = body.trim().split('\n'); + expect(lines).toHaveLength(2); + + const metadataFrame = JSON.parse(lines[0]); + expect(metadataFrame.metadata).toEqual({ + target_branch: 'main', + expected_head_sha: 'abc123', + commit_message: 'Apply patch', + author: { + name: 'Author Name', + email: 'author@example.com', + }, + }); + + const chunkFrame = JSON.parse(lines[1]); + expect(chunkFrame.diff_chunk.eof).toBe(true); + const decoded = Buffer.from( + chunkFrame.diff_chunk.data, + 'base64' + ).toString('utf8'); + expect(decoded).toBe('diff --git a/file.txt b/file.txt\n'); + + return { + ok: true, + 
status: 200, + json: async () => commitAck, + }; + }); + + const repo = await store.createRepo({ id: 'repo-main' }); + const result = await repo.createCommitFromDiff({ + targetBranch: 'main', + commitMessage: 'Apply patch', + expectedHeadSha: 'abc123', + author: { name: 'Author Name', email: 'author@example.com' }, + diff: 'diff --git a/file.txt b/file.txt\n', + }); + + expect(result).toEqual({ + commitSha: 'def456', + treeSha: 'abc123', + targetBranch: 'main', + packBytes: 84, + blobCount: 0, + refUpdate: { + branch: 'main', + oldSha: '0000000000000000000000000000000000000000', + newSha: 'def456', + }, + }); + }); + + it('requires diff content before sending', async () => { + const store = new GitStorage({ name: 'v0', key }); + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'repo-main', url: 'https://repo.git' }), + }) + ); + + const repo = await store.createRepo({ id: 'repo-main' }); + await expect( + repo.createCommitFromDiff({ + targetBranch: 'main', + commitMessage: 'Apply patch', + author: { name: 'Author', email: 'author@example.com' }, + diff: undefined as unknown as string, + }) + ).rejects.toThrow('createCommitFromDiff diff is required'); + }); + + it('converts error responses into RefUpdateError', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'repo-main', url: 'https://repo.git' }), + }) + ); + + mockFetch.mockImplementationOnce(async () => { + const response = { + ok: false, + status: 409, + statusText: 'Conflict', + async json() { + return { + result: { + status: 'conflict', + message: 'Head moved', + branch: 'main', + old_sha: 'abc', + new_sha: 'def', + }, + }; + }, + async text() { + return ''; + }, + clone() { + return this; + }, + }; + return response; + }); + + const repo = await store.createRepo({ 
id: 'repo-main' }); + + const promise = repo.createCommitFromDiff({ + targetBranch: 'refs/heads/main', + commitMessage: 'Apply patch', + expectedHeadSha: 'abc', + author: { name: 'Author', email: 'author@example.com' }, + diff: 'diff --git a/file.txt b/file.txt\n', + }); + + await expect(promise).rejects.toThrow(RefUpdateError); + + await promise.catch((error) => { + if (!(error instanceof RefUpdateError)) { + throw error; + } + expect(error.status).toBe('conflict'); + expect(error.refUpdate).toEqual({ + branch: 'main', + oldSha: 'abc', + newSha: 'def', + }); + }); + }); + + it('includes Code-Storage-Agent header in diff commit requests', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ + repo_id: 'repo-user-agent', + url: 'https://repo.git', + }), + }) + ); + + const commitAck = { + commit: { + commit_sha: 'useragent123', + tree_sha: 'tree456', + target_branch: 'main', + pack_bytes: 42, + blob_count: 0, + }, + result: { + branch: 'main', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'useragent123', + success: true, + status: 'ok', + }, + }; + + let capturedHeaders: Record | undefined; + mockFetch.mockImplementationOnce(async (_url, init) => { + capturedHeaders = init?.headers as Record; + return { + ok: true, + status: 200, + json: async () => commitAck, + }; + }); + + const repo = await store.createRepo({ id: 'repo-user-agent' }); + await repo.createCommitFromDiff({ + targetBranch: 'main', + commitMessage: 'Test user agent', + author: { name: 'Author Name', email: 'author@example.com' }, + diff: 'diff --git a/test.txt b/test.txt\n', + }); + + expect(capturedHeaders).toBeDefined(); + expect(capturedHeaders?.['Code-Storage-Agent']).toBeDefined(); + expect(capturedHeaders?.['Code-Storage-Agent']).toMatch( + /code-storage-sdk\/\d+\.\d+\.\d+/ + ); + }); }); diff --git a/packages/git-storage-sdk-node/tests/commit.test.ts 
b/packages/git-storage-sdk-node/tests/commit.test.ts index f2af5bdbe..2a71ef836 100644 --- a/packages/git-storage-sdk-node/tests/commit.test.ts +++ b/packages/git-storage-sdk-node/tests/commit.test.ts @@ -1,6 +1,7 @@ -import { ReadableStream } from 'node:stream/web'; import { Blob } from 'buffer'; +import { ReadableStream } from 'node:stream/web'; import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; + import { GitStorage, RefUpdateError } from '../src/index'; const key = `-----BEGIN PRIVATE KEY----- @@ -10,11 +11,11 @@ yTh6suablSura7ZDG8hpm3oNsq/ykC3Scfsw6ZTuuVuLlXKV/be/Xr0d -----END PRIVATE KEY-----`; const decodeJwtPayload = (jwt: string) => { - const parts = jwt.split('.'); - if (parts.length !== 3) { - throw new Error('Invalid JWT format'); - } - return JSON.parse(Buffer.from(parts[1], 'base64url').toString()); + const parts = jwt.split('.'); + if (parts.length !== 3) { + throw new Error('Invalid JWT format'); + } + return JSON.parse(Buffer.from(parts[1], 'base64url').toString()); }; const stripBearer = (value: string): string => value.replace(/^Bearer\s+/i, ''); @@ -24,813 +25,852 @@ const MAX_CHUNK_BYTES = 4 * 1024 * 1024; type MockFetch = ReturnType; function ensureMockFetch(): MockFetch { - const existing = globalThis.fetch as unknown; - if (existing && typeof existing === 'function' && 'mock' in (existing as any)) { - return existing as MockFetch; - } - const mock = vi.fn(); - vi.stubGlobal('fetch', mock); - return mock; + const existing = globalThis.fetch as unknown; + if ( + existing && + typeof existing === 'function' && + 'mock' in (existing as any) + ) { + return existing as MockFetch; + } + const mock = vi.fn(); + vi.stubGlobal('fetch', mock); + return mock; } async function readRequestBody(body: unknown): Promise { - if (!body) { - return ''; - } - if (typeof body === 'string') { - return body; - } - if (body instanceof Uint8Array) { - return new TextDecoder().decode(body); - } - if (isReadableStream(body)) { - const reader = 
body.getReader(); - const chunks: Uint8Array[] = []; - while (true) { - const { value, done } = await reader.read(); - if (done) { - break; - } - if (value) { - chunks.push(value); - } - } - reader.releaseLock?.(); - return decodeChunks(chunks); - } - if (isAsyncIterable(body)) { - const chunks: Uint8Array[] = []; - for await (const value of body as AsyncIterable) { - if (value instanceof Uint8Array) { - chunks.push(value); - } else if (typeof value === 'string') { - chunks.push(new TextEncoder().encode(value)); - } else if (value instanceof ArrayBuffer) { - chunks.push(new Uint8Array(value)); - } - } - return decodeChunks(chunks); - } - return ''; + if (!body) { + return ''; + } + if (typeof body === 'string') { + return body; + } + if (body instanceof Uint8Array) { + return new TextDecoder().decode(body); + } + if (isReadableStream(body)) { + const reader = body.getReader(); + const chunks: Uint8Array[] = []; + while (true) { + const { value, done } = await reader.read(); + if (done) { + break; + } + if (value) { + chunks.push(value); + } + } + reader.releaseLock?.(); + return decodeChunks(chunks); + } + if (isAsyncIterable(body)) { + const chunks: Uint8Array[] = []; + for await (const value of body as AsyncIterable) { + if (value instanceof Uint8Array) { + chunks.push(value); + } else if (typeof value === 'string') { + chunks.push(new TextEncoder().encode(value)); + } else if (value instanceof ArrayBuffer) { + chunks.push(new Uint8Array(value)); + } + } + return decodeChunks(chunks); + } + return ''; } function isReadableStream(value: unknown): value is ReadableStream { - return ( - typeof value === 'object' && - value !== null && - typeof (value as ReadableStream).getReader === 'function' - ); + return ( + typeof value === 'object' && + value !== null && + typeof (value as ReadableStream).getReader === 'function' + ); } function isAsyncIterable(value: unknown): value is AsyncIterable { - return ( - typeof value === 'object' && - value !== null && - 
Symbol.asyncIterator in (value as Record) - ); + return ( + typeof value === 'object' && + value !== null && + Symbol.asyncIterator in (value as Record) + ); } function decodeChunks(chunks: Uint8Array[]): string { - if (chunks.length === 0) { - return ''; - } - if (chunks.length === 1) { - return new TextDecoder().decode(chunks[0]); - } - const total = chunks.reduce((sum, chunk) => sum + chunk.byteLength, 0); - const combined = new Uint8Array(total); - let offset = 0; - for (const chunk of chunks) { - combined.set(chunk, offset); - offset += chunk.byteLength; - } - return new TextDecoder().decode(combined); + if (chunks.length === 0) { + return ''; + } + if (chunks.length === 1) { + return new TextDecoder().decode(chunks[0]); + } + const total = chunks.reduce((sum, chunk) => sum + chunk.byteLength, 0); + const combined = new Uint8Array(total); + let offset = 0; + for (const chunk of chunks) { + combined.set(chunk, offset); + offset += chunk.byteLength; + } + return new TextDecoder().decode(combined); } describe('createCommit builder', () => { - const mockFetch = ensureMockFetch(); - let randomSpy: ReturnType | undefined; - - beforeEach(() => { - mockFetch.mockReset(); - mockFetch.mockImplementation(() => - Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ repo_id: 'repo-id', url: 'https://example.git' }), - }), - ); - randomSpy = vi - .spyOn(globalThis.crypto, 'randomUUID') - .mockImplementation(() => 'cid-fixed' as any); - }); - - afterEach(() => { - randomSpy?.mockRestore(); - }); - - it('streams metadata and blob chunks in NDJSON order', async () => { - const store = new GitStorage({ name: 'v0', key }); - - const commitAck = { - commit: { - commit_sha: 'abc123', - tree_sha: 'def456', - target_branch: 'main', - pack_bytes: 42, - blob_count: 1, - }, - result: { - branch: 'main', - old_sha: '0000000000000000000000000000000000000000', - new_sha: 'abc123', - success: true, - status: 'ok', - }, - }; - - // createRepo call - 
mockFetch.mockImplementationOnce(() => - Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ repo_id: 'repo-main', url: 'https://repo.git' }), - }), - ); - - // commit call - mockFetch.mockImplementationOnce(async (_url, init) => { - expect(init?.method).toBe('POST'); - const headers = init?.headers as Record; - expect(headers.Authorization).toMatch(/^Bearer\s.+/); - expect(headers['Content-Type']).toBe('application/x-ndjson'); - - const body = await readRequestBody(init?.body); - const lines = body.trim().split('\n'); - expect(lines).toHaveLength(2); - - const metadataFrame = JSON.parse(lines[0]); - expect(metadataFrame.metadata.commit_message).toBe('Update docs'); - expect(metadataFrame.metadata.author).toEqual({ - name: 'Author Name', - email: 'author@example.com', - }); - expect(metadataFrame.metadata.files).toEqual([ - expect.objectContaining({ - path: 'docs/readme.md', - operation: 'upsert', - content_id: 'cid-fixed', - }), - expect.objectContaining({ path: 'docs/old.txt', operation: 'delete' }), - ]); - expect(metadataFrame.metadata).not.toHaveProperty('ephemeral'); - expect(metadataFrame.metadata).not.toHaveProperty('ephemeral_base'); - - const chunkFrame = JSON.parse(lines[1]); - expect(chunkFrame.blob_chunk.content_id).toBe('cid-fixed'); - expect(chunkFrame.blob_chunk.eof).toBe(true); - const decoded = Buffer.from(chunkFrame.blob_chunk.data, 'base64').toString('utf8'); - expect(decoded).toBe('# v2.0.1\n- add streaming SDK\n'); - - return { - ok: true, - status: 200, - json: async () => commitAck, - }; - }); - - const repo = await store.createRepo({ id: 'repo-main' }); - const response = await repo - .createCommit({ - targetBranch: 'main', - commitMessage: 'Update docs', - author: { name: 'Author Name', email: 'author@example.com' }, - }) - .addFileFromString('docs/readme.md', '# v2.0.1\n- add streaming SDK\n') - .deletePath('docs/old.txt') - .send(); - - expect(response).toEqual({ - commitSha: 'abc123', - treeSha: 
'def456', - targetBranch: 'main', - packBytes: 42, - blobCount: 1, - refUpdate: { - branch: 'main', - oldSha: '0000000000000000000000000000000000000000', - newSha: 'abc123', - }, - }); - expect(response.refUpdate.oldSha).toHaveLength(40); - }); - - it('includes base_branch metadata when provided', async () => { - const store = new GitStorage({ name: 'v0', key }); - - mockFetch.mockImplementationOnce(() => - Promise.resolve({ - ok: true, - status: 200, - json: async () => ({ repo_id: 'repo-base', url: 'https://repo.git' }), - }), - ); - - mockFetch.mockImplementationOnce(async (_url, init) => { - const body = await readRequestBody(init?.body); - const frames = body - .trim() - .split('\n') - .map((line) => JSON.parse(line)); - const metadata = frames[0].metadata; - expect(metadata.target_branch).toBe('feature/one'); - expect(metadata.expected_head_sha).toBe('abc123'); - expect(metadata.base_branch).toBe('main'); - return { - ok: true, - status: 200, - json: async () => ({ - commit: { - commit_sha: 'deadbeef', - tree_sha: 'cafebabe', - target_branch: 'feature/one', - pack_bytes: 1, - blob_count: 0, - }, - result: { - branch: 'feature/one', - old_sha: '0000000000000000000000000000000000000000', - new_sha: 'deadbeef', - success: true, - status: 'ok', - }, - }), - }; - }); - - const repo = await store.createRepo({ id: 'repo-base' }); - await repo - .createCommit({ - targetBranch: 'feature/one', - baseBranch: 'main', - expectedHeadSha: 'abc123', - commitMessage: 'branch off main', - author: { name: 'Author Name', email: 'author@example.com' }, - }) - .addFileFromString('docs/base.txt', 'hello') - .send(); - }); - - it('allows base_branch without expectedHeadSha', async () => { - const store = new GitStorage({ name: 'v0', key }); - - mockFetch.mockImplementationOnce(() => - Promise.resolve({ - ok: true, - status: 200, - json: async () => ({ repo_id: 'repo-base-no-head', url: 'https://repo.git' }), - }), - ); - - mockFetch.mockImplementationOnce(async (_url, init) => { - 
const body = await readRequestBody(init?.body); - const metadata = JSON.parse(body.trim().split('\n')[0]).metadata; - expect(metadata.base_branch).toBe('main'); - expect(metadata).not.toHaveProperty('expected_head_sha'); - return { - ok: true, - status: 200, - json: async () => ({ - commit: { - commit_sha: 'abc123', - tree_sha: 'def456', - target_branch: 'feature/one', - pack_bytes: 1, - blob_count: 1, - }, - result: { - branch: 'feature/one', - old_sha: '0000000000000000000000000000000000000000', - new_sha: 'abc123', - success: true, - status: 'ok', - }, - }), - }; - }); - - const repo = await store.createRepo({ id: 'repo-base-no-head' }); - await repo - .createCommit({ - targetBranch: 'feature/one', - baseBranch: 'main', - commitMessage: 'branch off', - author: { name: 'Author Name', email: 'author@example.com' }, - }) - .addFileFromString('docs/base.txt', 'hello') - .send(); - }); - - it('includes ephemeral flags when requested', async () => { - const store = new GitStorage({ name: 'v0', key }); - - mockFetch.mockImplementationOnce(() => - Promise.resolve({ - ok: true, - status: 200, - json: async () => ({ repo_id: 'repo-ephemeral', url: 'https://repo.git' }), - }), - ); - - const commitAck = { - commit: { - commit_sha: 'eph123', - tree_sha: 'eph456', - target_branch: 'feature/demo', - pack_bytes: 1, - blob_count: 1, - }, - result: { - branch: 'feature/demo', - old_sha: '0000000000000000000000000000000000000000', - new_sha: 'eph123', - success: true, - status: 'ok', - }, - }; - - mockFetch.mockImplementationOnce(async (_url, init) => { - const body = await readRequestBody(init?.body); - const frames = body - .trim() - .split('\n') - .map((line) => JSON.parse(line)); - const metadata = frames[0].metadata; - expect(metadata.target_branch).toBe('feature/demo'); - expect(metadata.base_branch).toBe('feature/base'); - expect(metadata.ephemeral).toBe(true); - expect(metadata.ephemeral_base).toBe(true); - return { - ok: true, - status: 200, - json: async () => 
commitAck, - }; - }); - - const repo = await store.createRepo({ id: 'repo-ephemeral' }); - await repo - .createCommit({ - targetBranch: 'feature/demo', - baseBranch: 'feature/base', - ephemeral: true, - ephemeralBase: true, - commitMessage: 'ephemeral commit', - author: { name: 'Author Name', email: 'author@example.com' }, - }) - .addFileFromString('docs/ephemeral.txt', 'hello') - .send(); - }); - - it('accepts Blob and ReadableStream sources', async () => { - randomSpy?.mockRestore(); - const ids = ['blob-source', 'stream-source']; - randomSpy = vi - .spyOn(globalThis.crypto, 'randomUUID') - .mockImplementation(() => ids.shift() ?? 'overflow'); - - const store = new GitStorage({ name: 'v0', key }); - - const commitAck = { - commit: { - commit_sha: 'feedbeef', - tree_sha: 'c0ffee42', - target_branch: 'main', - pack_bytes: 128, - blob_count: 2, - }, - result: { - branch: 'main', - old_sha: '0000000000000000000000000000000000000000', - new_sha: 'feedbeef', - success: true, - status: 'ok', - }, - }; - - mockFetch.mockImplementationOnce(() => - Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ repo_id: 'repo-blobs', url: 'https://repo.git' }), - }), - ); - - mockFetch.mockImplementationOnce(async (_url, init) => { - const body = await readRequestBody(init?.body); - const frames = body - .trim() - .split('\n') - .map((line) => JSON.parse(line)); - const metadata = frames[0].metadata; - expect(metadata.files).toEqual([ - expect.objectContaining({ path: 'assets/blob.bin', content_id: 'blob-source' }), - expect.objectContaining({ path: 'assets/stream.bin', content_id: 'stream-source' }), - ]); - - const chunkFrames = frames.slice(1).map((frame) => frame.blob_chunk); - expect(chunkFrames).toHaveLength(2); - const decoded = Object.fromEntries( - chunkFrames.map((chunk) => [ - chunk.content_id, - Buffer.from(chunk.data, 'base64').toString('utf8'), - ]), - ); - expect(decoded['blob-source']).toBe('blob-payload'); - 
expect(decoded['stream-source']).toBe('streamed-payload'); - - return { - ok: true, - status: 200, - json: async () => commitAck, - }; - }); - - const repo = await store.createRepo({ id: 'repo-blobs' }); - const blob = new Blob(['blob-payload'], { type: 'text/plain' }); - const readable = new ReadableStream({ - start(controller) { - controller.enqueue(new TextEncoder().encode('streamed-payload')); - controller.close(); - }, - }); - - const result = await repo - .createCommit({ - targetBranch: 'main', - commitMessage: 'Add mixed sources', - author: { name: 'Author Name', email: 'author@example.com' }, - }) - .addFile('assets/blob.bin', blob) - .addFile('assets/stream.bin', readable) - .send(); - - expect(result.commitSha).toBe('feedbeef'); - expect(result.refUpdate.newSha).toBe('feedbeef'); - }); - - it('splits large payloads into <=4MiB chunks', async () => { - const store = new GitStorage({ name: 'v0', key }); - - mockFetch.mockImplementationOnce(() => - Promise.resolve({ - ok: true, - status: 200, - json: async () => ({ repo_id: 'repo-chunk', url: 'https://repo.git' }), - }), - ); - - mockFetch.mockImplementationOnce(async (_url, init) => { - const body = await readRequestBody(init?.body); - const lines = body.trim().split('\n'); - expect(lines.length).toBe(3); - - const firstChunk = JSON.parse(lines[1]).blob_chunk; - const secondChunk = JSON.parse(lines[2]).blob_chunk; - - expect(Buffer.from(firstChunk.data, 'base64')).toHaveLength(MAX_CHUNK_BYTES); - expect(firstChunk.eof).toBe(false); - - expect(Buffer.from(secondChunk.data, 'base64')).toHaveLength(10); - expect(secondChunk.eof).toBe(true); - - return { - ok: true, - status: 200, - json: async () => ({ - commit: { - commit_sha: 'chunk123', - tree_sha: 'tree456', - target_branch: 'main', - pack_bytes: MAX_CHUNK_BYTES + 10, - blob_count: 1, - }, - result: { - branch: 'main', - old_sha: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', - new_sha: 'chunk123', - success: true, - status: 'ok', - }, - }), - }; - }); - - 
const repo = await store.createRepo({ id: 'repo-chunk' }); - const payload = new Uint8Array(MAX_CHUNK_BYTES + 10).fill(0x61); // 'a' - - const result = await repo - .createCommit({ - targetBranch: 'main', - commitMessage: 'Large commit', - author: { name: 'Author Name', email: 'author@example.com' }, - }) - .addFile('large.bin', payload) - .send(); - - expect(result.refUpdate.oldSha).toHaveLength(40); - expect(result.refUpdate.newSha).toBe('chunk123'); - }); - - it('throws when author is missing', async () => { - const store = new GitStorage({ name: 'v0', key }); - - mockFetch.mockImplementationOnce(() => - Promise.resolve({ - ok: true, - status: 200, - json: async () => ({ repo_id: 'repo-missing-author', url: 'https://repo.git' }), - }), - ); - - const repo = await store.createRepo({ id: 'repo-missing-author' }); - expect(() => - repo.createCommit({ targetBranch: 'main', commitMessage: 'Missing author' }), - ).toThrow('createCommit author name and email are required'); - }); - - it('accepts legacy targetRef for backwards compatibility', async () => { - const store = new GitStorage({ name: 'v0', key }); - - mockFetch.mockImplementationOnce(() => - Promise.resolve({ - ok: true, - status: 200, - json: async () => ({ repo_id: 'repo-legacy-target-ref', url: 'https://repo.git' }), - }), - ); - - const commitAck = { - commit: { - commit_sha: 'legacy123', - tree_sha: 'legacy456', - target_branch: 'main', - pack_bytes: 0, - blob_count: 0, - }, - result: { - branch: 'main', - old_sha: '0000000000000000000000000000000000000000', - new_sha: 'legacy123', - success: true, - status: 'ok', - }, - }; - - mockFetch.mockImplementationOnce(async (_url, init) => { - const body = await readRequestBody(init?.body); - const [metadataLine] = body.trim().split('\n'); - const payload = JSON.parse(metadataLine); - expect(payload.metadata.target_branch).toBe('main'); - return { - ok: true, - status: 200, - json: async () => commitAck, - }; - }); - - const repo = await store.createRepo({ id: 
'repo-legacy-target-ref' }); - const response = await repo - .createCommit({ - targetRef: 'refs/heads/main', - commitMessage: 'Legacy path', - author: { name: 'Legacy Author', email: 'legacy@example.com' }, - }) - .send(); - - expect(response.targetBranch).toBe('main'); - expect(response.commitSha).toBe('legacy123'); - }); - - it('supports non-UTF encodings when Buffer is available', async () => { - const store = new GitStorage({ name: 'v0', key }); - - mockFetch.mockImplementationOnce(() => - Promise.resolve({ - ok: true, - status: 200, - json: async () => ({ repo_id: 'repo-enc', url: 'https://repo.git' }), - }), - ); - - mockFetch.mockImplementationOnce(async (_url, init) => { - const body = await readRequestBody(init?.body); - const lines = body.trim().split('\n'); - expect(lines).toHaveLength(2); - const chunk = JSON.parse(lines[1]).blob_chunk; - const decoded = Buffer.from(chunk.data, 'base64').toString('latin1'); - expect(decoded).toBe('\u00a1Hola!'); - return { - ok: true, - status: 200, - json: async () => ({ - commit: { - commit_sha: 'enc123', - tree_sha: 'treeenc', - target_branch: 'main', - pack_bytes: 12, - blob_count: 1, - }, - result: { - branch: 'main', - old_sha: '0000000000000000000000000000000000000000', - new_sha: 'enc123', - success: true, - status: 'ok', - }, - }), - }; - }); - - const repo = await store.createRepo({ id: 'repo-enc' }); - await repo - .createCommit({ - targetBranch: 'main', - commitMessage: 'Add latin1 greeting', - author: { name: 'Author Name', email: 'author@example.com' }, - }) - .addFileFromString('docs/hola.txt', '\u00a1Hola!', { encoding: 'latin1' }) - .send(); - }); - - it('honors deprecated ttl option when sending commits', async () => { - const store = new GitStorage({ name: 'v0', key }); - const legacyTTL = 4321; - - mockFetch.mockImplementationOnce(() => - Promise.resolve({ - ok: true, - status: 200, - json: async () => ({ repo_id: 'repo-legacy-ttl', url: 'https://repo.git' }), - }), - ); - - const commitAck = { - 
commit: { - commit_sha: 'legacy123', - tree_sha: 'treetree', - target_branch: 'main', - pack_bytes: 16, - blob_count: 1, - }, - result: { - branch: 'main', - old_sha: '0000000000000000000000000000000000000000', - new_sha: 'legacy123', - success: true, - status: 'ok', - }, - }; - - let authHeader: string | undefined; - mockFetch.mockImplementationOnce(async (_url, init) => { - authHeader = (init?.headers as Record | undefined)?.Authorization; - return { - ok: true, - status: 200, - json: async () => commitAck, - }; - }); - - const repo = await store.createRepo({ id: 'repo-legacy-ttl' }); - await repo - .createCommit({ - targetBranch: 'main', - commitMessage: 'Legacy ttl commit', - author: { name: 'Author Name', email: 'author@example.com' }, - ttl: legacyTTL, - }) - .addFileFromString('docs/legacy.txt', 'legacy ttl content') - .send(); - - expect(authHeader).toBeDefined(); - const payload = decodeJwtPayload(stripBearer(authHeader!)); - expect(payload.exp - payload.iat).toBe(legacyTTL); - }); - - it('rejects baseBranch values with refs prefix', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({ id: 'repo-base-prefix' }); - expect(() => - repo.createCommit({ - targetBranch: 'feature/two', - baseBranch: 'refs/heads/main', - expectedHeadSha: 'abc123', - commitMessage: 'branch', - author: { name: 'Author Name', email: 'author@example.com' }, - }), - ).toThrow('createCommit baseBranch must not include refs/ prefix'); - }); - - it('throws RefUpdateError when backend reports failure', async () => { - const store = new GitStorage({ name: 'v0', key }); - - mockFetch.mockImplementationOnce(() => - Promise.resolve({ - ok: true, - status: 200, - json: async () => ({ repo_id: 'repo-fail', url: 'https://repo.git' }), - }), - ); - - mockFetch.mockImplementationOnce(async () => ({ - ok: true, - status: 200, - json: async () => ({ - commit: { - commit_sha: 'deadbeef', - tree_sha: 'feedbabe', - target_branch: 'main', - 
pack_bytes: 0, - blob_count: 0, - }, - result: { - branch: 'main', - old_sha: '1234567890123456789012345678901234567890', - new_sha: 'deadbeef', - success: false, - status: 'precondition_failed', - message: 'base mismatch', - }, - }), - })); - - const repo = await store.createRepo({ id: 'repo-fail' }); - - await expect( - repo - .createCommit({ - targetBranch: 'main', - commitMessage: 'bad commit', - author: { name: 'Author Name', email: 'author@example.com' }, - }) - .addFileFromString('docs/readme.md', 'oops') - .send(), - ).rejects.toBeInstanceOf(RefUpdateError); - }); - - it('includes Code-Storage-Agent header in commit requests', async () => { - const store = new GitStorage({ name: 'v0', key }); - - mockFetch.mockImplementationOnce(() => - Promise.resolve({ - ok: true, - status: 200, - json: async () => ({ repo_id: 'repo-user-agent', url: 'https://repo.git' }), - }), - ); - - const commitAck = { - commit: { - commit_sha: 'useragent123', - tree_sha: 'tree456', - target_branch: 'main', - pack_bytes: 10, - blob_count: 1, - }, - result: { - branch: 'main', - old_sha: '0000000000000000000000000000000000000000', - new_sha: 'useragent123', - success: true, - status: 'ok', - }, - }; - - let capturedHeaders: Record | undefined; - mockFetch.mockImplementationOnce(async (_url, init) => { - capturedHeaders = init?.headers as Record; - return { - ok: true, - status: 200, - json: async () => commitAck, - }; - }); - - const repo = await store.createRepo({ id: 'repo-user-agent' }); - await repo - .createCommit({ - targetBranch: 'main', - commitMessage: 'Test user agent', - author: { name: 'Author Name', email: 'author@example.com' }, - }) - .addFileFromString('test.txt', 'test') - .send(); - - expect(capturedHeaders).toBeDefined(); - expect(capturedHeaders?.['Code-Storage-Agent']).toBeDefined(); - expect(capturedHeaders?.['Code-Storage-Agent']).toMatch(/code-storage-sdk\/\d+\.\d+\.\d+/); - }); + const mockFetch = ensureMockFetch(); + let randomSpy: ReturnType | undefined; + + 
beforeEach(() => { + mockFetch.mockReset(); + mockFetch.mockImplementation(() => + Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'repo-id', url: 'https://example.git' }), + }) + ); + randomSpy = vi + .spyOn(globalThis.crypto, 'randomUUID') + .mockImplementation(() => 'cid-fixed' as any); + }); + + afterEach(() => { + randomSpy?.mockRestore(); + }); + + it('streams metadata and blob chunks in NDJSON order', async () => { + const store = new GitStorage({ name: 'v0', key }); + + const commitAck = { + commit: { + commit_sha: 'abc123', + tree_sha: 'def456', + target_branch: 'main', + pack_bytes: 42, + blob_count: 1, + }, + result: { + branch: 'main', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'abc123', + success: true, + status: 'ok', + }, + }; + + // createRepo call + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'repo-main', url: 'https://repo.git' }), + }) + ); + + // commit call + mockFetch.mockImplementationOnce(async (_url, init) => { + expect(init?.method).toBe('POST'); + const headers = init?.headers as Record; + expect(headers.Authorization).toMatch(/^Bearer\s.+/); + expect(headers['Content-Type']).toBe('application/x-ndjson'); + + const body = await readRequestBody(init?.body); + const lines = body.trim().split('\n'); + expect(lines).toHaveLength(2); + + const metadataFrame = JSON.parse(lines[0]); + expect(metadataFrame.metadata.commit_message).toBe('Update docs'); + expect(metadataFrame.metadata.author).toEqual({ + name: 'Author Name', + email: 'author@example.com', + }); + expect(metadataFrame.metadata.files).toEqual([ + expect.objectContaining({ + path: 'docs/readme.md', + operation: 'upsert', + content_id: 'cid-fixed', + }), + expect.objectContaining({ path: 'docs/old.txt', operation: 'delete' }), + ]); + expect(metadataFrame.metadata).not.toHaveProperty('ephemeral'); + 
expect(metadataFrame.metadata).not.toHaveProperty('ephemeral_base'); + + const chunkFrame = JSON.parse(lines[1]); + expect(chunkFrame.blob_chunk.content_id).toBe('cid-fixed'); + expect(chunkFrame.blob_chunk.eof).toBe(true); + const decoded = Buffer.from( + chunkFrame.blob_chunk.data, + 'base64' + ).toString('utf8'); + expect(decoded).toBe('# v2.0.1\n- add streaming SDK\n'); + + return { + ok: true, + status: 200, + json: async () => commitAck, + }; + }); + + const repo = await store.createRepo({ id: 'repo-main' }); + const response = await repo + .createCommit({ + targetBranch: 'main', + commitMessage: 'Update docs', + author: { name: 'Author Name', email: 'author@example.com' }, + }) + .addFileFromString('docs/readme.md', '# v2.0.1\n- add streaming SDK\n') + .deletePath('docs/old.txt') + .send(); + + expect(response).toEqual({ + commitSha: 'abc123', + treeSha: 'def456', + targetBranch: 'main', + packBytes: 42, + blobCount: 1, + refUpdate: { + branch: 'main', + oldSha: '0000000000000000000000000000000000000000', + newSha: 'abc123', + }, + }); + expect(response.refUpdate.oldSha).toHaveLength(40); + }); + + it('includes base_branch metadata when provided', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ repo_id: 'repo-base', url: 'https://repo.git' }), + }) + ); + + mockFetch.mockImplementationOnce(async (_url, init) => { + const body = await readRequestBody(init?.body); + const frames = body + .trim() + .split('\n') + .map((line) => JSON.parse(line)); + const metadata = frames[0].metadata; + expect(metadata.target_branch).toBe('feature/one'); + expect(metadata.expected_head_sha).toBe('abc123'); + expect(metadata.base_branch).toBe('main'); + return { + ok: true, + status: 200, + json: async () => ({ + commit: { + commit_sha: 'deadbeef', + tree_sha: 'cafebabe', + target_branch: 'feature/one', + pack_bytes: 1, + blob_count: 0, + }, + 
result: { + branch: 'feature/one', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'deadbeef', + success: true, + status: 'ok', + }, + }), + }; + }); + + const repo = await store.createRepo({ id: 'repo-base' }); + await repo + .createCommit({ + targetBranch: 'feature/one', + baseBranch: 'main', + expectedHeadSha: 'abc123', + commitMessage: 'branch off main', + author: { name: 'Author Name', email: 'author@example.com' }, + }) + .addFileFromString('docs/base.txt', 'hello') + .send(); + }); + + it('allows base_branch without expectedHeadSha', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ + repo_id: 'repo-base-no-head', + url: 'https://repo.git', + }), + }) + ); + + mockFetch.mockImplementationOnce(async (_url, init) => { + const body = await readRequestBody(init?.body); + const metadata = JSON.parse(body.trim().split('\n')[0]).metadata; + expect(metadata.base_branch).toBe('main'); + expect(metadata).not.toHaveProperty('expected_head_sha'); + return { + ok: true, + status: 200, + json: async () => ({ + commit: { + commit_sha: 'abc123', + tree_sha: 'def456', + target_branch: 'feature/one', + pack_bytes: 1, + blob_count: 1, + }, + result: { + branch: 'feature/one', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'abc123', + success: true, + status: 'ok', + }, + }), + }; + }); + + const repo = await store.createRepo({ id: 'repo-base-no-head' }); + await repo + .createCommit({ + targetBranch: 'feature/one', + baseBranch: 'main', + commitMessage: 'branch off', + author: { name: 'Author Name', email: 'author@example.com' }, + }) + .addFileFromString('docs/base.txt', 'hello') + .send(); + }); + + it('includes ephemeral flags when requested', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: 
async () => ({ + repo_id: 'repo-ephemeral', + url: 'https://repo.git', + }), + }) + ); + + const commitAck = { + commit: { + commit_sha: 'eph123', + tree_sha: 'eph456', + target_branch: 'feature/demo', + pack_bytes: 1, + blob_count: 1, + }, + result: { + branch: 'feature/demo', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'eph123', + success: true, + status: 'ok', + }, + }; + + mockFetch.mockImplementationOnce(async (_url, init) => { + const body = await readRequestBody(init?.body); + const frames = body + .trim() + .split('\n') + .map((line) => JSON.parse(line)); + const metadata = frames[0].metadata; + expect(metadata.target_branch).toBe('feature/demo'); + expect(metadata.base_branch).toBe('feature/base'); + expect(metadata.ephemeral).toBe(true); + expect(metadata.ephemeral_base).toBe(true); + return { + ok: true, + status: 200, + json: async () => commitAck, + }; + }); + + const repo = await store.createRepo({ id: 'repo-ephemeral' }); + await repo + .createCommit({ + targetBranch: 'feature/demo', + baseBranch: 'feature/base', + ephemeral: true, + ephemeralBase: true, + commitMessage: 'ephemeral commit', + author: { name: 'Author Name', email: 'author@example.com' }, + }) + .addFileFromString('docs/ephemeral.txt', 'hello') + .send(); + }); + + it('accepts Blob and ReadableStream sources', async () => { + randomSpy?.mockRestore(); + const ids = ['blob-source', 'stream-source']; + randomSpy = vi + .spyOn(globalThis.crypto, 'randomUUID') + .mockImplementation(() => ids.shift() ?? 
'overflow'); + + const store = new GitStorage({ name: 'v0', key }); + + const commitAck = { + commit: { + commit_sha: 'feedbeef', + tree_sha: 'c0ffee42', + target_branch: 'main', + pack_bytes: 128, + blob_count: 2, + }, + result: { + branch: 'main', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'feedbeef', + success: true, + status: 'ok', + }, + }; + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ repo_id: 'repo-blobs', url: 'https://repo.git' }), + }) + ); + + mockFetch.mockImplementationOnce(async (_url, init) => { + const body = await readRequestBody(init?.body); + const frames = body + .trim() + .split('\n') + .map((line) => JSON.parse(line)); + const metadata = frames[0].metadata; + expect(metadata.files).toEqual([ + expect.objectContaining({ + path: 'assets/blob.bin', + content_id: 'blob-source', + }), + expect.objectContaining({ + path: 'assets/stream.bin', + content_id: 'stream-source', + }), + ]); + + const chunkFrames = frames.slice(1).map((frame) => frame.blob_chunk); + expect(chunkFrames).toHaveLength(2); + const decoded = Object.fromEntries( + chunkFrames.map((chunk) => [ + chunk.content_id, + Buffer.from(chunk.data, 'base64').toString('utf8'), + ]) + ); + expect(decoded['blob-source']).toBe('blob-payload'); + expect(decoded['stream-source']).toBe('streamed-payload'); + + return { + ok: true, + status: 200, + json: async () => commitAck, + }; + }); + + const repo = await store.createRepo({ id: 'repo-blobs' }); + const blob = new Blob(['blob-payload'], { type: 'text/plain' }); + const readable = new ReadableStream({ + start(controller) { + controller.enqueue(new TextEncoder().encode('streamed-payload')); + controller.close(); + }, + }); + + const result = await repo + .createCommit({ + targetBranch: 'main', + commitMessage: 'Add mixed sources', + author: { name: 'Author Name', email: 'author@example.com' }, + }) + .addFile('assets/blob.bin', blob) + 
.addFile('assets/stream.bin', readable) + .send(); + + expect(result.commitSha).toBe('feedbeef'); + expect(result.refUpdate.newSha).toBe('feedbeef'); + }); + + it('splits large payloads into <=4MiB chunks', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ repo_id: 'repo-chunk', url: 'https://repo.git' }), + }) + ); + + mockFetch.mockImplementationOnce(async (_url, init) => { + const body = await readRequestBody(init?.body); + const lines = body.trim().split('\n'); + expect(lines.length).toBe(3); + + const firstChunk = JSON.parse(lines[1]).blob_chunk; + const secondChunk = JSON.parse(lines[2]).blob_chunk; + + expect(Buffer.from(firstChunk.data, 'base64')).toHaveLength( + MAX_CHUNK_BYTES + ); + expect(firstChunk.eof).toBe(false); + + expect(Buffer.from(secondChunk.data, 'base64')).toHaveLength(10); + expect(secondChunk.eof).toBe(true); + + return { + ok: true, + status: 200, + json: async () => ({ + commit: { + commit_sha: 'chunk123', + tree_sha: 'tree456', + target_branch: 'main', + pack_bytes: MAX_CHUNK_BYTES + 10, + blob_count: 1, + }, + result: { + branch: 'main', + old_sha: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', + new_sha: 'chunk123', + success: true, + status: 'ok', + }, + }), + }; + }); + + const repo = await store.createRepo({ id: 'repo-chunk' }); + const payload = new Uint8Array(MAX_CHUNK_BYTES + 10).fill(0x61); // 'a' + + const result = await repo + .createCommit({ + targetBranch: 'main', + commitMessage: 'Large commit', + author: { name: 'Author Name', email: 'author@example.com' }, + }) + .addFile('large.bin', payload) + .send(); + + expect(result.refUpdate.oldSha).toHaveLength(40); + expect(result.refUpdate.newSha).toBe('chunk123'); + }); + + it('throws when author is missing', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + 
status: 200, + json: async () => ({ + repo_id: 'repo-missing-author', + url: 'https://repo.git', + }), + }) + ); + + const repo = await store.createRepo({ id: 'repo-missing-author' }); + expect(() => + repo.createCommit({ + targetBranch: 'main', + commitMessage: 'Missing author', + }) + ).toThrow('createCommit author name and email are required'); + }); + + it('accepts legacy targetRef for backwards compatibility', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ + repo_id: 'repo-legacy-target-ref', + url: 'https://repo.git', + }), + }) + ); + + const commitAck = { + commit: { + commit_sha: 'legacy123', + tree_sha: 'legacy456', + target_branch: 'main', + pack_bytes: 0, + blob_count: 0, + }, + result: { + branch: 'main', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'legacy123', + success: true, + status: 'ok', + }, + }; + + mockFetch.mockImplementationOnce(async (_url, init) => { + const body = await readRequestBody(init?.body); + const [metadataLine] = body.trim().split('\n'); + const payload = JSON.parse(metadataLine); + expect(payload.metadata.target_branch).toBe('main'); + return { + ok: true, + status: 200, + json: async () => commitAck, + }; + }); + + const repo = await store.createRepo({ id: 'repo-legacy-target-ref' }); + const response = await repo + .createCommit({ + targetRef: 'refs/heads/main', + commitMessage: 'Legacy path', + author: { name: 'Legacy Author', email: 'legacy@example.com' }, + }) + .send(); + + expect(response.targetBranch).toBe('main'); + expect(response.commitSha).toBe('legacy123'); + }); + + it('supports non-UTF encodings when Buffer is available', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ repo_id: 'repo-enc', url: 'https://repo.git' }), + }) + ); + + 
mockFetch.mockImplementationOnce(async (_url, init) => { + const body = await readRequestBody(init?.body); + const lines = body.trim().split('\n'); + expect(lines).toHaveLength(2); + const chunk = JSON.parse(lines[1]).blob_chunk; + const decoded = Buffer.from(chunk.data, 'base64').toString('latin1'); + expect(decoded).toBe('\u00a1Hola!'); + return { + ok: true, + status: 200, + json: async () => ({ + commit: { + commit_sha: 'enc123', + tree_sha: 'treeenc', + target_branch: 'main', + pack_bytes: 12, + blob_count: 1, + }, + result: { + branch: 'main', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'enc123', + success: true, + status: 'ok', + }, + }), + }; + }); + + const repo = await store.createRepo({ id: 'repo-enc' }); + await repo + .createCommit({ + targetBranch: 'main', + commitMessage: 'Add latin1 greeting', + author: { name: 'Author Name', email: 'author@example.com' }, + }) + .addFileFromString('docs/hola.txt', '\u00a1Hola!', { encoding: 'latin1' }) + .send(); + }); + + it('honors deprecated ttl option when sending commits', async () => { + const store = new GitStorage({ name: 'v0', key }); + const legacyTTL = 4321; + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ + repo_id: 'repo-legacy-ttl', + url: 'https://repo.git', + }), + }) + ); + + const commitAck = { + commit: { + commit_sha: 'legacy123', + tree_sha: 'treetree', + target_branch: 'main', + pack_bytes: 16, + blob_count: 1, + }, + result: { + branch: 'main', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'legacy123', + success: true, + status: 'ok', + }, + }; + + let authHeader: string | undefined; + mockFetch.mockImplementationOnce(async (_url, init) => { + authHeader = (init?.headers as Record | undefined) + ?.Authorization; + return { + ok: true, + status: 200, + json: async () => commitAck, + }; + }); + + const repo = await store.createRepo({ id: 'repo-legacy-ttl' }); + await repo + .createCommit({ 
+ targetBranch: 'main', + commitMessage: 'Legacy ttl commit', + author: { name: 'Author Name', email: 'author@example.com' }, + ttl: legacyTTL, + }) + .addFileFromString('docs/legacy.txt', 'legacy ttl content') + .send(); + + expect(authHeader).toBeDefined(); + const payload = decodeJwtPayload(stripBearer(authHeader!)); + expect(payload.exp - payload.iat).toBe(legacyTTL); + }); + + it('rejects baseBranch values with refs prefix', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'repo-base-prefix' }); + expect(() => + repo.createCommit({ + targetBranch: 'feature/two', + baseBranch: 'refs/heads/main', + expectedHeadSha: 'abc123', + commitMessage: 'branch', + author: { name: 'Author Name', email: 'author@example.com' }, + }) + ).toThrow('createCommit baseBranch must not include refs/ prefix'); + }); + + it('throws RefUpdateError when backend reports failure', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ repo_id: 'repo-fail', url: 'https://repo.git' }), + }) + ); + + mockFetch.mockImplementationOnce(async () => ({ + ok: true, + status: 200, + json: async () => ({ + commit: { + commit_sha: 'deadbeef', + tree_sha: 'feedbabe', + target_branch: 'main', + pack_bytes: 0, + blob_count: 0, + }, + result: { + branch: 'main', + old_sha: '1234567890123456789012345678901234567890', + new_sha: 'deadbeef', + success: false, + status: 'precondition_failed', + message: 'base mismatch', + }, + }), + })); + + const repo = await store.createRepo({ id: 'repo-fail' }); + + await expect( + repo + .createCommit({ + targetBranch: 'main', + commitMessage: 'bad commit', + author: { name: 'Author Name', email: 'author@example.com' }, + }) + .addFileFromString('docs/readme.md', 'oops') + .send() + ).rejects.toBeInstanceOf(RefUpdateError); + }); + + it('includes Code-Storage-Agent header in commit 
requests', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ + repo_id: 'repo-user-agent', + url: 'https://repo.git', + }), + }) + ); + + const commitAck = { + commit: { + commit_sha: 'useragent123', + tree_sha: 'tree456', + target_branch: 'main', + pack_bytes: 10, + blob_count: 1, + }, + result: { + branch: 'main', + old_sha: '0000000000000000000000000000000000000000', + new_sha: 'useragent123', + success: true, + status: 'ok', + }, + }; + + let capturedHeaders: Record | undefined; + mockFetch.mockImplementationOnce(async (_url, init) => { + capturedHeaders = init?.headers as Record; + return { + ok: true, + status: 200, + json: async () => commitAck, + }; + }); + + const repo = await store.createRepo({ id: 'repo-user-agent' }); + await repo + .createCommit({ + targetBranch: 'main', + commitMessage: 'Test user agent', + author: { name: 'Author Name', email: 'author@example.com' }, + }) + .addFileFromString('test.txt', 'test') + .send(); + + expect(capturedHeaders).toBeDefined(); + expect(capturedHeaders?.['Code-Storage-Agent']).toBeDefined(); + expect(capturedHeaders?.['Code-Storage-Agent']).toMatch( + /code-storage-sdk\/\d+\.\d+\.\d+/ + ); + }); }); diff --git a/packages/git-storage-sdk-node/tests/full-workflow.js b/packages/git-storage-sdk-node/tests/full-workflow.js index 5ea8e310b..5362ce3b2 100644 --- a/packages/git-storage-sdk-node/tests/full-workflow.js +++ b/packages/git-storage-sdk-node/tests/full-workflow.js @@ -37,7 +37,7 @@ * * The script is idempotent per unique repo id – it always creates a fresh repo. 
*/ - +import { SignJWT, importPKCS8 } from 'jose'; import { createHash, createPrivateKey } from 'node:crypto'; import { existsSync } from 'node:fs'; import { readFile } from 'node:fs/promises'; @@ -45,955 +45,1041 @@ import path from 'node:path'; import process from 'node:process'; import { setTimeout as delay } from 'node:timers/promises'; import { fileURLToPath, pathToFileURL } from 'node:url'; -import { importPKCS8, SignJWT } from 'jose'; const __filename = fileURLToPath(import.meta.url); const __dirname = path.dirname(__filename); const defaultKeyPath = path.resolve( - __dirname, - '../../../git3p-backend/hack/test-scripts/dev-keys/private.pem', + __dirname, + '../../../git3p-backend/hack/test-scripts/dev-keys/private.pem' ); function gitBlobSha(contents) { - const buffer = Buffer.from(contents, 'utf8'); - const header = `blob ${buffer.byteLength}\0`; - return createHash('sha1').update(header).update(buffer).digest('hex'); + const buffer = Buffer.from(contents, 'utf8'); + const header = `blob ${buffer.byteLength}\0`; + return createHash('sha1').update(header).update(buffer).digest('hex'); } function normalizeEnvironment(value) { - if (!value) { - return 'local'; - } - const normalized = value.toLowerCase(); - if (normalized === 'local') { - return 'local'; - } - if (normalized === 'stage' || normalized === 'staging') { - return 'staging'; - } - if (normalized === 'prod' || normalized === 'production') { - return 'production'; - } - throw new Error( - `Unsupported environment "${value}". Expected one of: local, staging, prod, production.`, - ); + if (!value) { + return 'local'; + } + const normalized = value.toLowerCase(); + if (normalized === 'local') { + return 'local'; + } + if (normalized === 'stage' || normalized === 'staging') { + return 'staging'; + } + if (normalized === 'prod' || normalized === 'production') { + return 'production'; + } + throw new Error( + `Unsupported environment "${value}". 
Expected one of: local, staging, prod, production.` + ); } function applyOrgPlaceholder(value, org) { - if (!value) { - return value; - } - return value.includes('{{org}}') ? value.replace('{{org}}', org) : value; + if (!value) { + return value; + } + return value.includes('{{org}}') ? value.replace('{{org}}', org) : value; } function printUsage() { - const scriptName = path.relative(process.cwd(), __filename); - console.log( - [ - `Usage: node ${scriptName} [options]`, - '', - 'Options:', - ' -e, --environment ENV Target environment (local|staging|prod|production)', - ' -k, --key PATH Path to signing key PEM', - ' -s, --subdomain NAME Customer subdomain for non-local environments', - ' -n, --namespace NAME Explicit namespace override', - ' -r, --repo NAME Repository identifier override', - ' --api-base-url URL Override API base URL', - ' --storage-base-url HOST Override storage host (hostname[:port])', - ' --timeout MS Override timeout in milliseconds', - ' --key-id ID Override JWT key identifier', - ' -h, --help Show this help text and exit', - '', - 'Environment variables can also be used (see script header for details).', - ].join('\n'), - ); + const scriptName = path.relative(process.cwd(), __filename); + console.log( + [ + `Usage: node ${scriptName} [options]`, + '', + 'Options:', + ' -e, --environment ENV Target environment (local|staging|prod|production)', + ' -k, --key PATH Path to signing key PEM', + ' -s, --subdomain NAME Customer subdomain for non-local environments', + ' -n, --namespace NAME Explicit namespace override', + ' -r, --repo NAME Repository identifier override', + ' --api-base-url URL Override API base URL', + ' --storage-base-url HOST Override storage host (hostname[:port])', + ' --timeout MS Override timeout in milliseconds', + ' --key-id ID Override JWT key identifier', + ' -h, --help Show this help text and exit', + '', + 'Environment variables can also be used (see script header for details).', + ].join('\n') + ); } function 
parseArgs(argv) { - const options = {}; - for (let index = 0; index < argv.length; index += 1) { - const arg = argv[index]; - const readValue = () => { - const next = argv[index + 1]; - if (!next) { - throw new Error(`Missing value for option "${arg}"`); - } - index += 1; - return next; - }; - - switch (arg) { - case '-e': - case '--environment': - options.environment = readValue(); - break; - case '-k': - case '--key': - options.keyPath = readValue(); - break; - case '-s': - case '--subdomain': - options.subdomain = readValue(); - break; - case '-n': - case '--namespace': - options.namespace = readValue(); - break; - case '-r': - case '--repo': - options.repoId = readValue(); - break; - case '--api-base-url': - options.apiBaseUrl = readValue(); - break; - case '--storage-base-url': - options.storageBaseUrl = readValue(); - break; - case '--timeout': - options.timeout = readValue(); - break; - case '--key-id': - options.keyId = readValue(); - break; - case '-h': - case '--help': - options.help = true; - break; - default: - if (arg.startsWith('-')) { - throw new Error(`Unknown option "${arg}"`); - } - throw new Error(`Unexpected argument "${arg}"`); - } - } - return options; + const options = {}; + for (let index = 0; index < argv.length; index += 1) { + const arg = argv[index]; + const readValue = () => { + const next = argv[index + 1]; + if (!next) { + throw new Error(`Missing value for option "${arg}"`); + } + index += 1; + return next; + }; + + switch (arg) { + case '-e': + case '--environment': + options.environment = readValue(); + break; + case '-k': + case '--key': + options.keyPath = readValue(); + break; + case '-s': + case '--subdomain': + options.subdomain = readValue(); + break; + case '-n': + case '--namespace': + options.namespace = readValue(); + break; + case '-r': + case '--repo': + options.repoId = readValue(); + break; + case '--api-base-url': + options.apiBaseUrl = readValue(); + break; + case '--storage-base-url': + options.storageBaseUrl = 
readValue(); + break; + case '--timeout': + options.timeout = readValue(); + break; + case '--key-id': + options.keyId = readValue(); + break; + case '-h': + case '--help': + options.help = true; + break; + default: + if (arg.startsWith('-')) { + throw new Error(`Unknown option "${arg}"`); + } + throw new Error(`Unexpected argument "${arg}"`); + } + } + return options; } let cliOptions; try { - cliOptions = parseArgs(process.argv.slice(2)); + cliOptions = parseArgs(process.argv.slice(2)); } catch (error) { - console.error(error instanceof Error ? error.message : 'Failed to parse command line arguments.'); - printUsage(); - process.exit(1); + console.error( + error instanceof Error + ? error.message + : 'Failed to parse command line arguments.' + ); + printUsage(); + process.exit(1); } if (cliOptions?.help) { - printUsage(); - process.exit(0); + printUsage(); + process.exit(0); } const env = { - runEnv: normalizeEnvironment(process.env.GIT_STORAGE_ENV ?? 'local'), - apiBaseUrl: process.env.GIT_STORAGE_API_BASE_URL, - storageBaseUrl: process.env.GIT_STORAGE_STORAGE_BASE_URL, - namespace: process.env.GIT_STORAGE_NAME, - subdomain: process.env.GIT_STORAGE_SUBDOMAIN, - defaultBranch: process.env.GIT_STORAGE_DEFAULT_BRANCH ?? 'main', - repoId: process.env.GIT_STORAGE_REPO_ID, - keyPath: process.env.GIT_STORAGE_KEY_PATH ?? defaultKeyPath, - keyId: process.env.GIT_STORAGE_KEY_ID ?? 'dev-key-001', - timeoutMs: process.env.GIT_STORAGE_TIMEOUT - ? Number.parseInt(process.env.GIT_STORAGE_TIMEOUT, 10) - : undefined, + runEnv: normalizeEnvironment(process.env.GIT_STORAGE_ENV ?? 'local'), + apiBaseUrl: process.env.GIT_STORAGE_API_BASE_URL, + storageBaseUrl: process.env.GIT_STORAGE_STORAGE_BASE_URL, + namespace: process.env.GIT_STORAGE_NAME, + subdomain: process.env.GIT_STORAGE_SUBDOMAIN, + defaultBranch: process.env.GIT_STORAGE_DEFAULT_BRANCH ?? 'main', + repoId: process.env.GIT_STORAGE_REPO_ID, + keyPath: process.env.GIT_STORAGE_KEY_PATH ?? 
defaultKeyPath, + keyId: process.env.GIT_STORAGE_KEY_ID ?? 'dev-key-001', + timeoutMs: process.env.GIT_STORAGE_TIMEOUT + ? Number.parseInt(process.env.GIT_STORAGE_TIMEOUT, 10) + : undefined, }; if (cliOptions?.environment) { - env.runEnv = normalizeEnvironment(cliOptions.environment); + env.runEnv = normalizeEnvironment(cliOptions.environment); } if (cliOptions?.apiBaseUrl) { - env.apiBaseUrl = cliOptions.apiBaseUrl; + env.apiBaseUrl = cliOptions.apiBaseUrl; } if (cliOptions?.storageBaseUrl) { - env.storageBaseUrl = cliOptions.storageBaseUrl; + env.storageBaseUrl = cliOptions.storageBaseUrl; } if (cliOptions?.namespace) { - env.namespace = cliOptions.namespace; + env.namespace = cliOptions.namespace; } if (cliOptions?.subdomain) { - env.subdomain = cliOptions.subdomain; + env.subdomain = cliOptions.subdomain; } if (cliOptions?.repoId) { - env.repoId = cliOptions.repoId; + env.repoId = cliOptions.repoId; } if (cliOptions?.keyPath) { - env.keyPath = path.resolve(cliOptions.keyPath); + env.keyPath = path.resolve(cliOptions.keyPath); } if (cliOptions?.keyId) { - env.keyId = cliOptions.keyId; + env.keyId = cliOptions.keyId; } if (cliOptions?.timeout) { - const parsedTimeout = Number.parseInt(cliOptions.timeout, 10); - if (Number.isNaN(parsedTimeout) || parsedTimeout <= 0) { - console.error( - `Invalid timeout value "${cliOptions.timeout}". Expected a positive integer in milliseconds.`, - ); - printUsage(); - process.exit(1); - } - env.timeoutMs = parsedTimeout; + const parsedTimeout = Number.parseInt(cliOptions.timeout, 10); + if (Number.isNaN(parsedTimeout) || parsedTimeout <= 0) { + console.error( + `Invalid timeout value "${cliOptions.timeout}". 
Expected a positive integer in milliseconds.` + ); + printUsage(); + process.exit(1); + } + env.timeoutMs = parsedTimeout; } const KEY_CACHE = new Map(); const JSON_BODY_PREVIEW_LIMIT = 1_024; if (typeof Response !== 'undefined' && Response.prototype?.json) { - const originalResponseJson = Response.prototype.json; - Response.prototype.json = async function patchedJson(...args) { - try { - return await originalResponseJson.apply(this, args); - } catch (error) { - if (error instanceof SyntaxError) { - let bodyPreview = ''; - try { - const clone = this.clone(); - const text = await clone.text(); - bodyPreview = - text.length > JSON_BODY_PREVIEW_LIMIT - ? `${text.slice(0, JSON_BODY_PREVIEW_LIMIT)}...` - : text; - } catch (readError) { - const reason = readError instanceof Error ? readError.message : String(readError); - bodyPreview = ``; - } - - const context = { - url: this.url ?? '', - status: this.status ?? '', - statusText: this.statusText ?? '', - bodyPreview, - }; - - try { - const headers = {}; - for (const [key, value] of this.headers.entries()) { - headers[key] = value; - } - context.headers = headers; - } catch { - // Ignore header extraction failures. - } - - console.error('WARNING: Failed to parse JSON response.', context); - } - throw error; - } - }; + const originalResponseJson = Response.prototype.json; + Response.prototype.json = async function patchedJson(...args) { + try { + return await originalResponseJson.apply(this, args); + } catch (error) { + if (error instanceof SyntaxError) { + let bodyPreview = ''; + try { + const clone = this.clone(); + const text = await clone.text(); + bodyPreview = + text.length > JSON_BODY_PREVIEW_LIMIT + ? `${text.slice(0, JSON_BODY_PREVIEW_LIMIT)}...` + : text; + } catch (readError) { + const reason = + readError instanceof Error ? readError.message : String(readError); + bodyPreview = ``; + } + + const context = { + url: this.url ?? '', + status: this.status ?? '', + statusText: this.statusText ?? 
'', + bodyPreview, + }; + + try { + const headers = {}; + for (const [key, value] of this.headers.entries()) { + headers[key] = value; + } + context.headers = headers; + } catch { + // Ignore header extraction failures. + } + + console.error('WARNING: Failed to parse JSON response.', context); + } + throw error; + } + }; } function resolveDistEntry() { - const esmPath = path.resolve(__dirname, '../dist/index.js'); - if (existsSync(esmPath)) { - return esmPath; - } - return null; + const esmPath = path.resolve(__dirname, '../dist/index.js'); + if (existsSync(esmPath)) { + return esmPath; + } + return null; } async function loadGitStorage() { - const distEntry = resolveDistEntry(); - if (!distEntry) { - throw new Error( - [ - 'GitStorage dist build not found.', - 'Run "pnpm --filter @pierre/storage build" before executing this script,', - 'or provide a compiled dist at packages/git-storage-sdk/dist/index.js.', - ].join(' '), - ); - } - const module = await import(pathToFileURL(distEntry).href); - if (!module.GitStorage) { - throw new Error(`GitStorage export missing from ${distEntry}`); - } - patchGitStorage(module.GitStorage); - return module.GitStorage; + const distEntry = resolveDistEntry(); + if (!distEntry) { + throw new Error( + [ + 'GitStorage dist build not found.', + 'Run "pnpm --filter @pierre/storage build" before executing this script,', + 'or provide a compiled dist at packages/git-storage-sdk/dist/index.js.', + ].join(' ') + ); + } + const module = await import(pathToFileURL(distEntry).href); + if (!module.GitStorage) { + throw new Error(`GitStorage export missing from ${distEntry}`); + } + patchGitStorage(module.GitStorage); + return module.GitStorage; } async function waitFor(check, options = {}) { - const { timeout = 120_000, interval = 2_000, description, isFatalError, onRetry } = options; - const deadline = Date.now() + timeout; - let lastError; - let attempt = 0; - - while (Date.now() < deadline) { - attempt += 1; - try { - const result = await 
check(); - if (result) { - return result; - } - } catch (error) { - if (typeof isFatalError === 'function' && isFatalError(error)) { - throw error; - } - lastError = error; - if (typeof onRetry === 'function') { - onRetry({ attempt, error }); - } - } - if (!lastError && typeof onRetry === 'function') { - onRetry({ attempt }); - } - await delay(interval); - } - - const message = description - ? `Timed out waiting for ${description}` - : 'Timed out waiting for condition'; - if (lastError instanceof Error) { - throw new Error(message, { cause: lastError }); - } - throw new Error(message); + const { + timeout = 120_000, + interval = 2_000, + description, + isFatalError, + onRetry, + } = options; + const deadline = Date.now() + timeout; + let lastError; + let attempt = 0; + + while (Date.now() < deadline) { + attempt += 1; + try { + const result = await check(); + if (result) { + return result; + } + } catch (error) { + if (typeof isFatalError === 'function' && isFatalError(error)) { + throw error; + } + lastError = error; + if (typeof onRetry === 'function') { + onRetry({ attempt, error }); + } + } + if (!lastError && typeof onRetry === 'function') { + onRetry({ attempt }); + } + await delay(interval); + } + + const message = description + ? `Timed out waiting for ${description}` + : 'Timed out waiting for condition'; + if (lastError instanceof Error) { + throw new Error(message, { cause: lastError }); + } + throw new Error(message); } async function main() { - if (!existsSync(env.keyPath)) { - throw new Error( - `Signing key not found at ${env.keyPath}. Set GIT_STORAGE_KEY_PATH to override.`, - ); - } - - const key = await readFile(env.keyPath, 'utf8'); - - const namespace = - env.namespace ?? - (env.runEnv === 'local' - ? 'local' - : (env.subdomain ?? 
- (() => { - throw new Error( - 'Set GIT_STORAGE_NAME or GIT_STORAGE_SUBDOMAIN when targeting non-local environments.', - ); - })())); - - const namespaceSlug = namespace.toLowerCase(); - - const apiBaseUrl = applyOrgPlaceholder( - env.apiBaseUrl ?? - (env.runEnv === 'local' - ? 'http://127.0.0.1:8081' - : env.runEnv === 'staging' - ? 'https://api.{{org}}.3p.pierre.rip' - : 'https://api.{{org}}.code.storage'), - namespaceSlug, - ); - - const storageBaseUrl = applyOrgPlaceholder( - env.storageBaseUrl ?? - (env.runEnv === 'local' - ? '127.0.0.1:8080' - : env.runEnv === 'staging' - ? '{{org}}.3p.pierre.rip' - : '{{org}}.code.storage'), - namespaceSlug, - ); - - const repoId = env.repoId ?? `sdk-full-workflow-${Date.now()}`; - const defaultBranch = env.defaultBranch; - const timeout = env.timeoutMs ?? 180_000; - const grepToken = `SDK_GREP_${repoId}`; - - console.log(`▶ GitStorage full workflow`); - console.log(` Environment: ${env.runEnv}`); - console.log(` Namespace: ${namespace}`); - console.log(` API base: ${apiBaseUrl}`); - console.log(` Storage host:${storageBaseUrl}`); - console.log(` Repo ID: ${repoId}`); - console.log(` Timeout: ${timeout / 1000}s`); - - const GitStorage = await loadGitStorage(); - const store = new GitStorage({ - name: namespace, - key, - apiBaseUrl, - storageBaseUrl, - }); - - const repo = await store.createRepo({ id: repoId, defaultBranch }); - console.log(`✓ Repository created (${repo.id})`); - - const signature = () => ({ - name: 'SDK Committer', - email: 'sdk@example.com', - }); - - const initialSig = signature(); - const initialCommit = await repo - .createCommit({ - targetBranch: defaultBranch, - commitMessage: 'Initial commit: Add README via SDK', - author: initialSig, - committer: initialSig, - }) - .addFileFromString( - 'README.md', - [ - `# ${repoId}`, - '', - 'This repository is created by the GitStorage SDK full workflow script.', - '', - `Grep marker: ${grepToken}`, - 'Case marker: ONLYUPPERCASE', - ].join('\n'), - { encoding: 
'utf-8' }, - ) - .send(); - const baselineCommitSha = initialCommit.commitSha; - let latestCommitSha = baselineCommitSha; - console.log(`✓ Initial commit pushed (${latestCommitSha})`); - - await waitFor( - async () => { - const branches = await repo.listBranches({ limit: 10 }); - return branches.branches.find( - (branch) => branch.name === defaultBranch && branch.headSha === latestCommitSha, - ); - }, - { timeout, description: `default branch ${defaultBranch} to include initial commit` }, - ); - console.log(`✓ Default branch updated (${defaultBranch})`); - - await waitFor( - async () => { - const commits = await repo.listCommits({ branch: defaultBranch, limit: 5 }); - return commits.commits.find((commit) => commit.sha === latestCommitSha); - }, - { timeout, description: 'initial commit to appear in commit list' }, - ); - console.log(`✓ Commit listing reflects initial commit`); - - if (typeof repo.grep !== 'function') { - throw new Error( - [ - 'This repo instance does not expose repo.grep().', - 'The full-workflow script loads the SDK from packages/git-storage-sdk/dist, which is likely stale.', - 'Run "pnpm --filter @pierre/storage build" and retry.', - ].join(' '), - ); - } - - await waitFor( - async () => { - const result = await repo.grep({ - ref: defaultBranch, - query: { pattern: 'onlyuppercase', caseSensitive: true }, - limits: { maxLines: 5 }, - }); - - if (result.matches.length !== 0) { - throw new Error( - `Expected case-sensitive grep to return zero matches, got ${result.matches.length}`, - ); - } - - return result; - }, - { - timeout, - description: 'grep API to return empty results for case-sensitive mismatch', - isFatalError: (error) => - error instanceof TypeError || - (error?.name === 'ApiError' && - typeof error.status === 'number' && - error.status >= 400 && - error.status < 500 && - error.status !== 429), - onRetry: ({ attempt, error }) => { - if (error && attempt % 3 === 0) { - console.log( - `… retrying grep case-sensitive check (attempt 
${attempt}): ${error.message ?? error}`, - ); - } - }, - }, - ); - - const grepCaseInsensitive = await waitFor( - async () => { - const result = await repo.grep({ - ref: defaultBranch, - query: { pattern: 'onlyuppercase', caseSensitive: false }, - limits: { maxLines: 10 }, - }); - - const hasMatch = result.matches.some((match) => - match.lines.some((line) => line.type === 'match' && line.text.includes('ONLYUPPERCASE')), - ); - return hasMatch ? result : null; - }, - { - timeout, - description: 'grep API to return results for case-insensitive match', - isFatalError: (error) => - error instanceof TypeError || - (error?.name === 'ApiError' && - typeof error.status === 'number' && - error.status >= 400 && - error.status < 500 && - error.status !== 429), - onRetry: ({ attempt, error }) => { - if (attempt % 5 !== 0) { - return; - } - if (error) { - console.log(`… waiting for grep results (attempt ${attempt}): ${error.message ?? error}`); - } else { - console.log(`… waiting for grep results (attempt ${attempt})`); - } - }, - }, - ); - console.log( - `✓ Grep API returns case-insensitive matches (${grepCaseInsensitive.matches.length} file(s))`, - ); - - const grepFiltered = await waitFor( - async () => { - const result = await repo.grep({ - ref: defaultBranch, - query: { pattern: grepToken, caseSensitive: true }, - fileFilters: { includeGlobs: ['README.md'] }, - limits: { maxLines: 10, maxMatchesPerFile: 3 }, - }); - - const hasToken = result.matches.some((match) => - match.lines.some((line) => line.type === 'match' && line.text.includes(grepToken)), - ); - return hasToken ? 
result : null; - }, - { - timeout, - description: 'grep API to return matches filtered to README.md', - isFatalError: (error) => - error instanceof TypeError || - (error?.name === 'ApiError' && - typeof error.status === 'number' && - error.status >= 400 && - error.status < 500 && - error.status !== 429), - onRetry: ({ attempt, error }) => { - if (attempt % 5 !== 0) { - return; - } - if (error) { - console.log( - `… waiting for grep file-filtered match (attempt ${attempt}): ${error.message ?? error}`, - ); - } else { - console.log(`… waiting for grep file-filtered match (attempt ${attempt})`); - } - }, - }, - ); - console.log(`✓ Grep API respects file filters (${grepFiltered.matches.length} file(s))`); - - const packSig = signature(); - const addMessage = 'Add file via commit-pack API (SDK)'; - const addCommit = await repo - .createCommit({ - targetBranch: defaultBranch, - expectedHeadSha: latestCommitSha, - commitMessage: addMessage, - author: packSig, - committer: packSig, - }) - .addFileFromString( - 'api-generated.txt', - [ - 'File generated via GitStorage SDK full workflow script.', - `Repository: ${repoId}`, - `Commit message: ${addMessage}`, - ].join('\n'), - { encoding: 'utf-8' }, - ) - .send(); - latestCommitSha = addCommit.commitSha; - console.log(`✓ Commit-pack add executed (${latestCommitSha})`); - - await waitFor( - async () => { - const commits = await repo.listCommits({ branch: defaultBranch, limit: 5 }); - const match = commits.commits.find((commit) => commit.sha === latestCommitSha); - if (match) { - if (match.message !== addMessage) { - throw new Error(`Unexpected commit message: ${match.message}`); - } - return match; - } - return null; - }, - { timeout, description: 'commit-pack add to appear in commit list' }, - ); - console.log(`✓ Commit listing includes commit-pack add`); - - await waitFor( - async () => { - const branches = await repo.listBranches({ limit: 10 }); - return branches.branches.find( - (branch) => branch.name === defaultBranch && 
branch.headSha === latestCommitSha, - ); - }, - { timeout, description: `default branch ${defaultBranch} to advance to commit-pack add` }, - ); - console.log(`✓ Default branch advanced to commit-pack add`); - - await waitFor( - async () => { - const files = await repo.listFiles({ ref: defaultBranch }); - return files.paths.includes('api-generated.txt') ? files : null; - }, - { timeout, description: 'api-generated.txt to appear in listFiles response' }, - ); - console.log(`✓ api-generated.txt present via listFiles`); - - const updateSig = signature(); - const updateMessage = 'Update document via commit-pack API (SDK)'; - const updateCommit = await repo - .createCommit({ - targetBranch: defaultBranch, - expectedHeadSha: latestCommitSha, - commitMessage: updateMessage, - author: updateSig, - committer: updateSig, - }) - .addFileFromString( - 'api-generated.txt', - [ - 'File generated via GitStorage SDK full workflow script.', - `Repository: ${repoId}`, - 'Updated content: CommitPack run verified document update via SDK.', - ].join('\n'), - { encoding: 'utf-8' }, - ) - .send(); - latestCommitSha = updateCommit.commitSha; - console.log(`✓ Commit-pack update executed (${latestCommitSha})`); - - const updateInfo = await waitFor( - async () => { - const commits = await repo.listCommits({ branch: defaultBranch, limit: 5 }); - return commits.commits.find((commit) => commit.sha === latestCommitSha); - }, - { timeout, description: 'commit-pack update to appear in commit list' }, - ); - if (updateInfo.message !== updateMessage) { - throw new Error(`Unexpected commit message for update: ${updateInfo.message}`); - } - console.log(`✓ Commit listing includes commit-pack update`); - - await waitFor( - async () => { - const branches = await repo.listBranches({ limit: 10 }); - return branches.branches.find( - (branch) => branch.name === defaultBranch && branch.headSha === latestCommitSha, - ); - }, - { timeout, description: `default branch ${defaultBranch} to advance to commit-pack 
update` }, - ); - console.log(`✓ Default branch advanced to commit-pack update`); - - const diff = await waitFor( - async () => { - const response = await repo.getCommitDiff({ sha: latestCommitSha }); - return response.files.some((file) => file.path === 'api-generated.txt') ? response : null; - }, - { timeout, description: 'commit diff for commit-pack update' }, - ); - console.log(`✓ Commit diff verified (${diff.files.length} file(s))`); - - await waitFor( - async () => { - const response = await repo.getFileStream({ path: 'api-generated.txt', ref: defaultBranch }); - const body = await response.text(); - return body.includes('Updated content') ? body : null; - }, - { timeout, description: 'api-generated.txt to return updated content' }, - ); - console.log(`✓ api-generated.txt contains updated content`); - - const diffSig = signature(); - const diffMessage = 'Apply diff via createCommitFromDiff (SDK)'; - const diffFileName = 'diff-endpoint.txt'; - const diffLines = [ - 'Diff commit created by GitStorage SDK full workflow script.', - `Repository: ${repoId}`, - `Timestamp: ${new Date().toISOString()}`, - ]; - const diffBody = `${diffLines.join('\n')}\n`; - const diffBlobSha = gitBlobSha(diffBody); - const diffHunkHeader = `@@ -0,0 +1,${diffLines.length} @@`; - const diffPatchLines = [ - `diff --git a/${diffFileName} b/${diffFileName}`, - 'new file mode 100644', - `index 0000000000000000000000000000000000000000..${diffBlobSha}`, - '--- /dev/null', - `+++ b/${diffFileName}`, - diffHunkHeader, - ...diffLines.map((line) => `+${line}`), - ]; - const diffPatch = `${diffPatchLines.join('\n')}\n`; - - const diffCommit = await repo.createCommitFromDiff({ - targetBranch: defaultBranch, - expectedHeadSha: latestCommitSha, - commitMessage: diffMessage, - author: diffSig, - committer: diffSig, - diff: diffPatch, - }); - latestCommitSha = diffCommit.commitSha; - console.log(`✓ createCommitFromDiff commit executed (${latestCommitSha})`); - - const diffCommitInfo = await waitFor( - 
async () => { - const commits = await repo.listCommits({ branch: defaultBranch, limit: 5 }); - return commits.commits.find((commit) => commit.sha === latestCommitSha); - }, - { timeout, description: 'diff commit to appear in commit list' }, - ); - if (diffCommitInfo.message !== diffMessage) { - throw new Error(`Unexpected diff commit message: ${diffCommitInfo.message}`); - } - console.log('✓ Commit listing includes createCommitFromDiff commit'); - - await waitFor( - async () => { - const files = await repo.listFiles({ ref: defaultBranch }); - return files.paths.includes(diffFileName) ? files : null; - }, - { timeout, description: `${diffFileName} to appear in listFiles response` }, - ); - console.log(`✓ ${diffFileName} present via listFiles`); - - await waitFor( - async () => { - const response = await repo.getFileStream({ path: diffFileName, ref: defaultBranch }); - const body = await response.text(); - return body.includes('Diff commit created') ? body : null; - }, - { timeout, description: `${diffFileName} to contain diff-applied content` }, - ); - console.log(`✓ ${diffFileName} contains diff-applied content`); - - const featureBranch = `sdk-feature-${Date.now()}`; - const featureSig = signature(); - const featureMessage = 'Create feature branch via commit-pack API (SDK)'; - const featureOptions = { - targetBranch: featureBranch, - baseBranch: defaultBranch, - expectedHeadSha: latestCommitSha, - commitMessage: featureMessage, - author: featureSig, - committer: featureSig, - }; - console.log('[full-workflow] feature commit options', featureOptions); - const featureCommit = await repo - .createCommit(featureOptions) - .addFileFromString( - 'feature.txt', - [ - 'Feature branch file generated via GitStorage SDK full workflow script.', - `Repository: ${repoId}`, - `Branch: ${featureBranch}`, - ].join('\n'), - ) - .send(); - console.log(`✓ Feature branch commit created (${featureCommit.commitSha})`); - - await waitFor( - async () => { - const branches = await 
repo.listBranches({ limit: 25 }); - return branches.branches.find( - (branch) => branch.name === featureBranch && branch.headSha === featureCommit.commitSha, - ); - }, - { timeout, description: `feature branch ${featureBranch} to appear in branch list` }, - ); - console.log(`✓ Feature branch ${featureBranch} reported by listBranches`); - - await waitFor( - async () => { - const files = await repo.listFiles({ ref: featureBranch }); - return files.paths.includes('feature.txt') ? files : null; - }, - { timeout, description: 'feature branch file to be accessible' }, - ); - console.log(`✓ feature.txt accessible on ${featureBranch}`); - - const restoreSig = signature(); - const restoreMessage = `Restore to pre-deploy baseline`; - const restoreResult = await repo.restoreCommit({ - targetBranch: defaultBranch, - expectedHeadSha: latestCommitSha, - targetCommitSha: baselineCommitSha, - commitMessage: restoreMessage, - author: restoreSig, - committer: restoreSig, - }); - latestCommitSha = restoreResult.commitSha; - console.log(`✓ Restore commit executed (${latestCommitSha})`); - - const restoreInfo = await waitFor( - async () => { - const commits = await repo.listCommits({ branch: defaultBranch, limit: 5 }); - return commits.commits.find((commit) => commit.sha === latestCommitSha); - }, - { timeout, description: 'restore commit to appear in commit list' }, - ); - if (restoreInfo.message !== restoreMessage) { - throw new Error(`Unexpected restore commit message: ${restoreInfo.message}`); - } - - await waitFor( - async () => { - const branches = await repo.listBranches({ limit: 10 }); - return branches.branches.find( - (branch) => branch.name === defaultBranch && branch.headSha === latestCommitSha, - ); - }, - { timeout, description: `default branch ${defaultBranch} to advance to restore commit` }, - ); - console.log(`✓ Default branch advanced to restore commit`); - - await waitFor( - async () => { - const files = await repo.listFiles({ ref: defaultBranch }); - return 
files.paths.some((path) => path === 'api-generated.txt' || path === diffFileName) - ? null - : files; - }, - { - timeout, - description: 'api-generated.txt and diff-endpoint.txt removed after restore commit', - }, - ); - console.log(`✓ api-generated.txt and ${diffFileName} removed by restore commit`); - - const readmeBody = await repo - .getFileStream({ path: 'README.md', ref: defaultBranch }) - .then((resp) => resp.text()); - if (!readmeBody.includes(repoId)) { - throw new Error('README does not contain repository identifier'); - } - console.log(`✓ README accessible via getFileStream`); - - console.log('\n✅ GitStorage SDK full workflow completed successfully.'); - console.log(` Repository: ${repoId}`); - console.log(` Default branch: ${defaultBranch}`); + if (!existsSync(env.keyPath)) { + throw new Error( + `Signing key not found at ${env.keyPath}. Set GIT_STORAGE_KEY_PATH to override.` + ); + } + + const key = await readFile(env.keyPath, 'utf8'); + + const namespace = + env.namespace ?? + (env.runEnv === 'local' + ? 'local' + : (env.subdomain ?? + (() => { + throw new Error( + 'Set GIT_STORAGE_NAME or GIT_STORAGE_SUBDOMAIN when targeting non-local environments.' + ); + })())); + + const namespaceSlug = namespace.toLowerCase(); + + const apiBaseUrl = applyOrgPlaceholder( + env.apiBaseUrl ?? + (env.runEnv === 'local' + ? 'http://127.0.0.1:8081' + : env.runEnv === 'staging' + ? 'https://api.{{org}}.3p.pierre.rip' + : 'https://api.{{org}}.code.storage'), + namespaceSlug + ); + + const storageBaseUrl = applyOrgPlaceholder( + env.storageBaseUrl ?? + (env.runEnv === 'local' + ? '127.0.0.1:8080' + : env.runEnv === 'staging' + ? '{{org}}.3p.pierre.rip' + : '{{org}}.code.storage'), + namespaceSlug + ); + + const repoId = env.repoId ?? `sdk-full-workflow-${Date.now()}`; + const defaultBranch = env.defaultBranch; + const timeout = env.timeoutMs ?? 
180_000; + const grepToken = `SDK_GREP_${repoId}`; + + console.log(`▶ GitStorage full workflow`); + console.log(` Environment: ${env.runEnv}`); + console.log(` Namespace: ${namespace}`); + console.log(` API base: ${apiBaseUrl}`); + console.log(` Storage host:${storageBaseUrl}`); + console.log(` Repo ID: ${repoId}`); + console.log(` Timeout: ${timeout / 1000}s`); + + const GitStorage = await loadGitStorage(); + const store = new GitStorage({ + name: namespace, + key, + apiBaseUrl, + storageBaseUrl, + }); + + const repo = await store.createRepo({ id: repoId, defaultBranch }); + console.log(`✓ Repository created (${repo.id})`); + + const signature = () => ({ + name: 'SDK Committer', + email: 'sdk@example.com', + }); + + const initialSig = signature(); + const initialCommit = await repo + .createCommit({ + targetBranch: defaultBranch, + commitMessage: 'Initial commit: Add README via SDK', + author: initialSig, + committer: initialSig, + }) + .addFileFromString( + 'README.md', + [ + `# ${repoId}`, + '', + 'This repository is created by the GitStorage SDK full workflow script.', + '', + `Grep marker: ${grepToken}`, + 'Case marker: ONLYUPPERCASE', + ].join('\n'), + { encoding: 'utf-8' } + ) + .send(); + const baselineCommitSha = initialCommit.commitSha; + let latestCommitSha = baselineCommitSha; + console.log(`✓ Initial commit pushed (${latestCommitSha})`); + + await waitFor( + async () => { + const branches = await repo.listBranches({ limit: 10 }); + return branches.branches.find( + (branch) => + branch.name === defaultBranch && branch.headSha === latestCommitSha + ); + }, + { + timeout, + description: `default branch ${defaultBranch} to include initial commit`, + } + ); + console.log(`✓ Default branch updated (${defaultBranch})`); + + await waitFor( + async () => { + const commits = await repo.listCommits({ + branch: defaultBranch, + limit: 5, + }); + return commits.commits.find((commit) => commit.sha === latestCommitSha); + }, + { timeout, description: 'initial commit 
to appear in commit list' } + ); + console.log(`✓ Commit listing reflects initial commit`); + + if (typeof repo.grep !== 'function') { + throw new Error( + [ + 'This repo instance does not expose repo.grep().', + 'The full-workflow script loads the SDK from packages/git-storage-sdk/dist, which is likely stale.', + 'Run "pnpm --filter @pierre/storage build" and retry.', + ].join(' ') + ); + } + + await waitFor( + async () => { + const result = await repo.grep({ + ref: defaultBranch, + query: { pattern: 'onlyuppercase', caseSensitive: true }, + limits: { maxLines: 5 }, + }); + + if (result.matches.length !== 0) { + throw new Error( + `Expected case-sensitive grep to return zero matches, got ${result.matches.length}` + ); + } + + return result; + }, + { + timeout, + description: + 'grep API to return empty results for case-sensitive mismatch', + isFatalError: (error) => + error instanceof TypeError || + (error?.name === 'ApiError' && + typeof error.status === 'number' && + error.status >= 400 && + error.status < 500 && + error.status !== 429), + onRetry: ({ attempt, error }) => { + if (error && attempt % 3 === 0) { + console.log( + `… retrying grep case-sensitive check (attempt ${attempt}): ${error.message ?? error}` + ); + } + }, + } + ); + + const grepCaseInsensitive = await waitFor( + async () => { + const result = await repo.grep({ + ref: defaultBranch, + query: { pattern: 'onlyuppercase', caseSensitive: false }, + limits: { maxLines: 10 }, + }); + + const hasMatch = result.matches.some((match) => + match.lines.some( + (line) => line.type === 'match' && line.text.includes('ONLYUPPERCASE') + ) + ); + return hasMatch ? 
result : null; + }, + { + timeout, + description: 'grep API to return results for case-insensitive match', + isFatalError: (error) => + error instanceof TypeError || + (error?.name === 'ApiError' && + typeof error.status === 'number' && + error.status >= 400 && + error.status < 500 && + error.status !== 429), + onRetry: ({ attempt, error }) => { + if (attempt % 5 !== 0) { + return; + } + if (error) { + console.log( + `… waiting for grep results (attempt ${attempt}): ${error.message ?? error}` + ); + } else { + console.log(`… waiting for grep results (attempt ${attempt})`); + } + }, + } + ); + console.log( + `✓ Grep API returns case-insensitive matches (${grepCaseInsensitive.matches.length} file(s))` + ); + + const grepFiltered = await waitFor( + async () => { + const result = await repo.grep({ + ref: defaultBranch, + query: { pattern: grepToken, caseSensitive: true }, + fileFilters: { includeGlobs: ['README.md'] }, + limits: { maxLines: 10, maxMatchesPerFile: 3 }, + }); + + const hasToken = result.matches.some((match) => + match.lines.some( + (line) => line.type === 'match' && line.text.includes(grepToken) + ) + ); + return hasToken ? result : null; + }, + { + timeout, + description: 'grep API to return matches filtered to README.md', + isFatalError: (error) => + error instanceof TypeError || + (error?.name === 'ApiError' && + typeof error.status === 'number' && + error.status >= 400 && + error.status < 500 && + error.status !== 429), + onRetry: ({ attempt, error }) => { + if (attempt % 5 !== 0) { + return; + } + if (error) { + console.log( + `… waiting for grep file-filtered match (attempt ${attempt}): ${error.message ?? 
error}` + ); + } else { + console.log( + `… waiting for grep file-filtered match (attempt ${attempt})` + ); + } + }, + } + ); + console.log( + `✓ Grep API respects file filters (${grepFiltered.matches.length} file(s))` + ); + + const packSig = signature(); + const addMessage = 'Add file via commit-pack API (SDK)'; + const addCommit = await repo + .createCommit({ + targetBranch: defaultBranch, + expectedHeadSha: latestCommitSha, + commitMessage: addMessage, + author: packSig, + committer: packSig, + }) + .addFileFromString( + 'api-generated.txt', + [ + 'File generated via GitStorage SDK full workflow script.', + `Repository: ${repoId}`, + `Commit message: ${addMessage}`, + ].join('\n'), + { encoding: 'utf-8' } + ) + .send(); + latestCommitSha = addCommit.commitSha; + console.log(`✓ Commit-pack add executed (${latestCommitSha})`); + + await waitFor( + async () => { + const commits = await repo.listCommits({ + branch: defaultBranch, + limit: 5, + }); + const match = commits.commits.find( + (commit) => commit.sha === latestCommitSha + ); + if (match) { + if (match.message !== addMessage) { + throw new Error(`Unexpected commit message: ${match.message}`); + } + return match; + } + return null; + }, + { timeout, description: 'commit-pack add to appear in commit list' } + ); + console.log(`✓ Commit listing includes commit-pack add`); + + await waitFor( + async () => { + const branches = await repo.listBranches({ limit: 10 }); + return branches.branches.find( + (branch) => + branch.name === defaultBranch && branch.headSha === latestCommitSha + ); + }, + { + timeout, + description: `default branch ${defaultBranch} to advance to commit-pack add`, + } + ); + console.log(`✓ Default branch advanced to commit-pack add`); + + await waitFor( + async () => { + const files = await repo.listFiles({ ref: defaultBranch }); + return files.paths.includes('api-generated.txt') ? 
files : null; + }, + { + timeout, + description: 'api-generated.txt to appear in listFiles response', + } + ); + console.log(`✓ api-generated.txt present via listFiles`); + + const updateSig = signature(); + const updateMessage = 'Update document via commit-pack API (SDK)'; + const updateCommit = await repo + .createCommit({ + targetBranch: defaultBranch, + expectedHeadSha: latestCommitSha, + commitMessage: updateMessage, + author: updateSig, + committer: updateSig, + }) + .addFileFromString( + 'api-generated.txt', + [ + 'File generated via GitStorage SDK full workflow script.', + `Repository: ${repoId}`, + 'Updated content: CommitPack run verified document update via SDK.', + ].join('\n'), + { encoding: 'utf-8' } + ) + .send(); + latestCommitSha = updateCommit.commitSha; + console.log(`✓ Commit-pack update executed (${latestCommitSha})`); + + const updateInfo = await waitFor( + async () => { + const commits = await repo.listCommits({ + branch: defaultBranch, + limit: 5, + }); + return commits.commits.find((commit) => commit.sha === latestCommitSha); + }, + { timeout, description: 'commit-pack update to appear in commit list' } + ); + if (updateInfo.message !== updateMessage) { + throw new Error( + `Unexpected commit message for update: ${updateInfo.message}` + ); + } + console.log(`✓ Commit listing includes commit-pack update`); + + await waitFor( + async () => { + const branches = await repo.listBranches({ limit: 10 }); + return branches.branches.find( + (branch) => + branch.name === defaultBranch && branch.headSha === latestCommitSha + ); + }, + { + timeout, + description: `default branch ${defaultBranch} to advance to commit-pack update`, + } + ); + console.log(`✓ Default branch advanced to commit-pack update`); + + const diff = await waitFor( + async () => { + const response = await repo.getCommitDiff({ sha: latestCommitSha }); + return response.files.some((file) => file.path === 'api-generated.txt') + ? 
response + : null; + }, + { timeout, description: 'commit diff for commit-pack update' } + ); + console.log(`✓ Commit diff verified (${diff.files.length} file(s))`); + + await waitFor( + async () => { + const response = await repo.getFileStream({ + path: 'api-generated.txt', + ref: defaultBranch, + }); + const body = await response.text(); + return body.includes('Updated content') ? body : null; + }, + { timeout, description: 'api-generated.txt to return updated content' } + ); + console.log(`✓ api-generated.txt contains updated content`); + + const diffSig = signature(); + const diffMessage = 'Apply diff via createCommitFromDiff (SDK)'; + const diffFileName = 'diff-endpoint.txt'; + const diffLines = [ + 'Diff commit created by GitStorage SDK full workflow script.', + `Repository: ${repoId}`, + `Timestamp: ${new Date().toISOString()}`, + ]; + const diffBody = `${diffLines.join('\n')}\n`; + const diffBlobSha = gitBlobSha(diffBody); + const diffHunkHeader = `@@ -0,0 +1,${diffLines.length} @@`; + const diffPatchLines = [ + `diff --git a/${diffFileName} b/${diffFileName}`, + 'new file mode 100644', + `index 0000000000000000000000000000000000000000..${diffBlobSha}`, + '--- /dev/null', + `+++ b/${diffFileName}`, + diffHunkHeader, + ...diffLines.map((line) => `+${line}`), + ]; + const diffPatch = `${diffPatchLines.join('\n')}\n`; + + const diffCommit = await repo.createCommitFromDiff({ + targetBranch: defaultBranch, + expectedHeadSha: latestCommitSha, + commitMessage: diffMessage, + author: diffSig, + committer: diffSig, + diff: diffPatch, + }); + latestCommitSha = diffCommit.commitSha; + console.log(`✓ createCommitFromDiff commit executed (${latestCommitSha})`); + + const diffCommitInfo = await waitFor( + async () => { + const commits = await repo.listCommits({ + branch: defaultBranch, + limit: 5, + }); + return commits.commits.find((commit) => commit.sha === latestCommitSha); + }, + { timeout, description: 'diff commit to appear in commit list' } + ); + if 
(diffCommitInfo.message !== diffMessage) { + throw new Error( + `Unexpected diff commit message: ${diffCommitInfo.message}` + ); + } + console.log('✓ Commit listing includes createCommitFromDiff commit'); + + await waitFor( + async () => { + const files = await repo.listFiles({ ref: defaultBranch }); + return files.paths.includes(diffFileName) ? files : null; + }, + { timeout, description: `${diffFileName} to appear in listFiles response` } + ); + console.log(`✓ ${diffFileName} present via listFiles`); + + await waitFor( + async () => { + const response = await repo.getFileStream({ + path: diffFileName, + ref: defaultBranch, + }); + const body = await response.text(); + return body.includes('Diff commit created') ? body : null; + }, + { timeout, description: `${diffFileName} to contain diff-applied content` } + ); + console.log(`✓ ${diffFileName} contains diff-applied content`); + + const featureBranch = `sdk-feature-${Date.now()}`; + const featureSig = signature(); + const featureMessage = 'Create feature branch via commit-pack API (SDK)'; + const featureOptions = { + targetBranch: featureBranch, + baseBranch: defaultBranch, + expectedHeadSha: latestCommitSha, + commitMessage: featureMessage, + author: featureSig, + committer: featureSig, + }; + console.log('[full-workflow] feature commit options', featureOptions); + const featureCommit = await repo + .createCommit(featureOptions) + .addFileFromString( + 'feature.txt', + [ + 'Feature branch file generated via GitStorage SDK full workflow script.', + `Repository: ${repoId}`, + `Branch: ${featureBranch}`, + ].join('\n') + ) + .send(); + console.log(`✓ Feature branch commit created (${featureCommit.commitSha})`); + + await waitFor( + async () => { + const branches = await repo.listBranches({ limit: 25 }); + return branches.branches.find( + (branch) => + branch.name === featureBranch && + branch.headSha === featureCommit.commitSha + ); + }, + { + timeout, + description: `feature branch ${featureBranch} to appear in 
branch list`, + } + ); + console.log(`✓ Feature branch ${featureBranch} reported by listBranches`); + + await waitFor( + async () => { + const files = await repo.listFiles({ ref: featureBranch }); + return files.paths.includes('feature.txt') ? files : null; + }, + { timeout, description: 'feature branch file to be accessible' } + ); + console.log(`✓ feature.txt accessible on ${featureBranch}`); + + const restoreSig = signature(); + const restoreMessage = `Restore to pre-deploy baseline`; + const restoreResult = await repo.restoreCommit({ + targetBranch: defaultBranch, + expectedHeadSha: latestCommitSha, + targetCommitSha: baselineCommitSha, + commitMessage: restoreMessage, + author: restoreSig, + committer: restoreSig, + }); + latestCommitSha = restoreResult.commitSha; + console.log(`✓ Restore commit executed (${latestCommitSha})`); + + const restoreInfo = await waitFor( + async () => { + const commits = await repo.listCommits({ + branch: defaultBranch, + limit: 5, + }); + return commits.commits.find((commit) => commit.sha === latestCommitSha); + }, + { timeout, description: 'restore commit to appear in commit list' } + ); + if (restoreInfo.message !== restoreMessage) { + throw new Error( + `Unexpected restore commit message: ${restoreInfo.message}` + ); + } + + await waitFor( + async () => { + const branches = await repo.listBranches({ limit: 10 }); + return branches.branches.find( + (branch) => + branch.name === defaultBranch && branch.headSha === latestCommitSha + ); + }, + { + timeout, + description: `default branch ${defaultBranch} to advance to restore commit`, + } + ); + console.log(`✓ Default branch advanced to restore commit`); + + await waitFor( + async () => { + const files = await repo.listFiles({ ref: defaultBranch }); + return files.paths.some( + (path) => path === 'api-generated.txt' || path === diffFileName + ) + ? 
null + : files; + }, + { + timeout, + description: + 'api-generated.txt and diff-endpoint.txt removed after restore commit', + } + ); + console.log( + `✓ api-generated.txt and ${diffFileName} removed by restore commit` + ); + + const readmeBody = await repo + .getFileStream({ path: 'README.md', ref: defaultBranch }) + .then((resp) => resp.text()); + if (!readmeBody.includes(repoId)) { + throw new Error('README does not contain repository identifier'); + } + console.log(`✓ README accessible via getFileStream`); + + console.log('\n✅ GitStorage SDK full workflow completed successfully.'); + console.log(` Repository: ${repoId}`); + console.log(` Default branch: ${defaultBranch}`); } function patchGitStorage(GitStorage) { - if (GitStorage.prototype.__fullWorkflowPatched) { - return; - } - - GitStorage.prototype.__fullWorkflowPatched = true; - - GitStorage.prototype.generateJWT = async function patchedGenerateJWT(repoId, options) { - const permissions = options?.permissions || ['git:write', 'git:read']; - const ttl = options?.ttl || 365 * 24 * 60 * 60; - const now = Math.floor(Date.now() / 1_000); - const payload = { - iss: this.options.name, - sub: '@pierre/storage', - repo: repoId, - scopes: permissions, - iat: now, - exp: now + ttl, - }; - - const { key, alg } = await resolveSigningKey(this.options.key); - const header = { alg, typ: 'JWT' }; - if (env.keyId) { - header.kid = env.keyId; - } - - return new SignJWT(payload).setProtectedHeader(header).sign(key); - }; + if (GitStorage.prototype.__fullWorkflowPatched) { + return; + } + + GitStorage.prototype.__fullWorkflowPatched = true; + + GitStorage.prototype.generateJWT = async function patchedGenerateJWT( + repoId, + options + ) { + const permissions = options?.permissions || ['git:write', 'git:read']; + const ttl = options?.ttl || 365 * 24 * 60 * 60; + const now = Math.floor(Date.now() / 1_000); + const payload = { + iss: this.options.name, + sub: '@pierre/storage', + repo: repoId, + scopes: permissions, + iat: now, + 
exp: now + ttl, + }; + + const { key, alg } = await resolveSigningKey(this.options.key); + const header = { alg, typ: 'JWT' }; + if (env.keyId) { + header.kid = env.keyId; + } + + return new SignJWT(payload).setProtectedHeader(header).sign(key); + }; } async function resolveSigningKey(pem) { - if (KEY_CACHE.has(pem)) { - return KEY_CACHE.get(pem); - } - - let selectedAlgorithm; - - try { - const keyObject = createPrivateKey({ key: pem, format: 'pem' }); - const type = keyObject.asymmetricKeyType; - if (type === 'rsa') { - selectedAlgorithm = 'RS256'; - } else if (type === 'rsa-pss') { - selectedAlgorithm = 'PS256'; - } else if (type === 'ec') { - const curve = keyObject.asymmetricKeyDetails?.namedCurve?.toLowerCase(); - switch (curve) { - case 'prime256v1': - case 'secp256r1': - case 'p-256': - selectedAlgorithm = 'ES256'; - break; - case 'secp384r1': - case 'p-384': - selectedAlgorithm = 'ES384'; - break; - case 'secp521r1': - case 'p-521': - selectedAlgorithm = 'ES512'; - break; - default: - break; - } - } else if (type === 'ed25519' || type === 'ed448') { - selectedAlgorithm = 'EdDSA'; - } - // fallthrough to general detection if selectedAlgorithm remains undefined - } catch { - // Ignore inspection errors, fall back to brute-force detection below. - } - - if (selectedAlgorithm) { - try { - const key = await importPKCS8(pem, selectedAlgorithm); - const entry = { key, alg: selectedAlgorithm }; - KEY_CACHE.set(pem, entry); - return entry; - } catch { - // If the direct attempt fails, continue with the fallback list below. 
- } - } - - const algorithms = [ - 'RS256', - 'RS384', - 'RS512', - 'PS256', - 'PS384', - 'PS512', - 'ES256', - 'ES384', - 'ES512', - 'EdDSA', - ]; - let lastError; - - for (const alg of algorithms) { - try { - const key = await importPKCS8(pem, alg); - const entry = { key, alg }; - KEY_CACHE.set(pem, entry); - return entry; - } catch (error) { - lastError = error; - } - } - - throw new Error('Unsupported key type for JWT signing', { cause: lastError }); + if (KEY_CACHE.has(pem)) { + return KEY_CACHE.get(pem); + } + + let selectedAlgorithm; + + try { + const keyObject = createPrivateKey({ key: pem, format: 'pem' }); + const type = keyObject.asymmetricKeyType; + if (type === 'rsa') { + selectedAlgorithm = 'RS256'; + } else if (type === 'rsa-pss') { + selectedAlgorithm = 'PS256'; + } else if (type === 'ec') { + const curve = keyObject.asymmetricKeyDetails?.namedCurve?.toLowerCase(); + switch (curve) { + case 'prime256v1': + case 'secp256r1': + case 'p-256': + selectedAlgorithm = 'ES256'; + break; + case 'secp384r1': + case 'p-384': + selectedAlgorithm = 'ES384'; + break; + case 'secp521r1': + case 'p-521': + selectedAlgorithm = 'ES512'; + break; + default: + break; + } + } else if (type === 'ed25519' || type === 'ed448') { + selectedAlgorithm = 'EdDSA'; + } + // fallthrough to general detection if selectedAlgorithm remains undefined + } catch { + // Ignore inspection errors, fall back to brute-force detection below. + } + + if (selectedAlgorithm) { + try { + const key = await importPKCS8(pem, selectedAlgorithm); + const entry = { key, alg: selectedAlgorithm }; + KEY_CACHE.set(pem, entry); + return entry; + } catch { + // If the direct attempt fails, continue with the fallback list below. 
+ } + } + + const algorithms = [ + 'RS256', + 'RS384', + 'RS512', + 'PS256', + 'PS384', + 'PS512', + 'ES256', + 'ES384', + 'ES512', + 'EdDSA', + ]; + let lastError; + + for (const alg of algorithms) { + try { + const key = await importPKCS8(pem, alg); + const entry = { key, alg }; + KEY_CACHE.set(pem, entry); + return entry; + } catch (error) { + lastError = error; + } + } + + throw new Error('Unsupported key type for JWT signing', { cause: lastError }); } main().catch((error) => { - console.error('❌ Workflow failed.'); - console.error(error instanceof Error ? (error.stack ?? error.message) : error); - process.exitCode = 1; + console.error('❌ Workflow failed.'); + console.error( + error instanceof Error ? (error.stack ?? error.message) : error + ); + process.exitCode = 1; }); diff --git a/packages/git-storage-sdk-node/tests/index.test.ts b/packages/git-storage-sdk-node/tests/index.test.ts index 6fb7b52de..fcb641949 100644 --- a/packages/git-storage-sdk-node/tests/index.test.ts +++ b/packages/git-storage-sdk-node/tests/index.test.ts @@ -1,16 +1,25 @@ import { importPKCS8, jwtVerify } from 'jose'; import { beforeEach, describe, expect, it, vi } from 'vitest'; -import { CodeStorage, createClient, GitStorage } from '../src/index'; + +import { CodeStorage, GitStorage, createClient } from '../src/index'; // Mock fetch globally if it is not already stubbed const existingFetch = globalThis.fetch as unknown; const mockFetch = - existingFetch && typeof existingFetch === 'function' && 'mock' in (existingFetch as any) - ? (existingFetch as ReturnType) - : vi.fn(); - -if (!(existingFetch && typeof existingFetch === 'function' && 'mock' in (existingFetch as any))) { - vi.stubGlobal('fetch', mockFetch); + existingFetch && + typeof existingFetch === 'function' && + 'mock' in (existingFetch as any) + ? 
(existingFetch as ReturnType) + : vi.fn(); + +if ( + !( + existingFetch && + typeof existingFetch === 'function' && + 'mock' in (existingFetch as any) + ) +) { + vi.stubGlobal('fetch', mockFetch); } const key = `-----BEGIN PRIVATE KEY----- @@ -20,1661 +29,1771 @@ yTh6suablSura7ZDG8hpm3oNsq/ykC3Scfsw6ZTuuVuLlXKV/be/Xr0d -----END PRIVATE KEY-----`; const decodeJwtPayload = (jwt: string) => { - const parts = jwt.split('.'); - if (parts.length !== 3) { - throw new Error('Invalid JWT format'); - } - return JSON.parse(Buffer.from(parts[1], 'base64url').toString()); + const parts = jwt.split('.'); + if (parts.length !== 3) { + throw new Error('Invalid JWT format'); + } + return JSON.parse(Buffer.from(parts[1], 'base64url').toString()); }; const stripBearer = (value: string): string => value.replace(/^Bearer\s+/i, ''); describe('GitStorage', () => { - beforeEach(() => { - // Reset mock before each test - mockFetch.mockReset(); - // Default successful response for createRepo - mockFetch.mockResolvedValue({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ repo_id: 'test-repo-id', url: 'https://test.code.storage/repo.git' }), - }); - }); - describe('constructor', () => { - it('should create an instance with required options', () => { - const store = new GitStorage({ name: 'v0', key }); - expect(store).toBeInstanceOf(GitStorage); - }); - - it('should store the provided key', () => { - const store = new GitStorage({ name: 'v0', key }); - const config = store.getConfig(); - expect(config.key).toBe(key); - }); - - it('should throw error when key is missing', () => { - expect(() => { - // @ts-expect-error - Testing missing key - new GitStorage({}); - }).toThrow( - 'GitStorage requires a name and key. 
Please check your configuration and try again.', - ); - }); - - it('should throw error when name or key is null or undefined', () => { - expect(() => { - // @ts-expect-error - Testing null key - new GitStorage({ name: 'v0', key: null }); - }).toThrow( - 'GitStorage requires a name and key. Please check your configuration and try again.', - ); - - expect(() => { - // @ts-expect-error - Testing undefined key - new GitStorage({ name: 'v0', key: undefined }); - }).toThrow( - 'GitStorage requires a name and key. Please check your configuration and try again.', - ); - - expect(() => { - // @ts-expect-error - Testing null name - new GitStorage({ name: null, key: 'test-key' }); - }).toThrow( - 'GitStorage requires a name and key. Please check your configuration and try again.', - ); - - expect(() => { - // @ts-expect-error - Testing undefined name - new GitStorage({ name: undefined, key: 'test-key' }); - }).toThrow( - 'GitStorage requires a name and key. Please check your configuration and try again.', - ); - }); - - it('should throw error when key is empty string', () => { - expect(() => { - new GitStorage({ name: 'v0', key: '' }); - }).toThrow('GitStorage key must be a non-empty string.'); - }); - - it('should throw error when name is empty string', () => { - expect(() => { - new GitStorage({ name: '', key: 'test-key' }); - }).toThrow('GitStorage name must be a non-empty string.'); - }); - - it('should throw error when key is only whitespace', () => { - expect(() => { - new GitStorage({ name: 'v0', key: ' ' }); - }).toThrow('GitStorage key must be a non-empty string.'); - }); - - it('should throw error when name is only whitespace', () => { - expect(() => { - new GitStorage({ name: ' ', key: 'test-key' }); - }).toThrow('GitStorage name must be a non-empty string.'); - }); - - it('should throw error when key is not a string', () => { - expect(() => { - // @ts-expect-error - Testing non-string key - new GitStorage({ name: 'v0', key: 123 }); - }).toThrow('GitStorage key 
must be a non-empty string.'); - - expect(() => { - // @ts-expect-error - Testing non-string key - new GitStorage({ name: 'v0', key: {} }); - }).toThrow('GitStorage key must be a non-empty string.'); - }); - - it('should throw error when name is not a string', () => { - expect(() => { - // @ts-expect-error - Testing non-string name - new GitStorage({ name: 123, key: 'test-key' }); - }).toThrow('GitStorage name must be a non-empty string.'); - - expect(() => { - // @ts-expect-error - Testing non-string name - new GitStorage({ name: {}, key: 'test-key' }); - }).toThrow('GitStorage name must be a non-empty string.'); - }); - }); - - it('parses commit dates into Date instances', async () => { - const store = new GitStorage({ name: 'v0', key }); - - const repo = await store.createRepo({ id: 'repo-dates' }); - - const rawCommits = { - commits: [ - { - sha: 'abc123', - message: 'feat: add endpoint', - author_name: 'Jane Doe', - author_email: 'jane@example.com', - committer_name: 'Jane Doe', - committer_email: 'jane@example.com', - date: '2024-01-15T14:32:18Z', - }, - ], - next_cursor: undefined, - has_more: false, - }; - - mockFetch.mockImplementationOnce(() => - Promise.resolve({ - ok: true, - status: 200, - json: async () => rawCommits, - }), - ); - - const commits = await repo.listCommits(); - expect(commits.commits[0].rawDate).toBe('2024-01-15T14:32:18Z'); - expect(commits.commits[0].date).toBeInstanceOf(Date); - expect(commits.commits[0].date.toISOString()).toBe('2024-01-15T14:32:18.000Z'); - }); - - it('fetches git notes with getNote', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({ id: 'repo-notes-read' }); - - mockFetch.mockImplementationOnce((url, init) => { - expect(init?.method).toBe('GET'); - const requestUrl = new URL(url as string); - expect(requestUrl.pathname.endsWith('/repos/notes')).toBe(true); - expect(requestUrl.searchParams.get('sha')).toBe('abc123'); - return Promise.resolve({ - ok: true, - 
status: 200, - statusText: 'OK', - json: async () => ({ - sha: 'abc123', - note: 'hello notes', - ref_sha: 'def456', - }), - } as any); - }); - - const result = await repo.getNote({ sha: 'abc123' }); - expect(result).toEqual({ sha: 'abc123', note: 'hello notes', refSha: 'def456' }); - }); - - it('sends note payloads with createNote, appendNote, and deleteNote', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({ id: 'repo-notes-write' }); - - mockFetch.mockImplementationOnce((url, init) => { - expect(init?.method).toBe('POST'); - const requestUrl = new URL(url as string); - expect(requestUrl.pathname.endsWith('/repos/notes')).toBe(true); - expect(init?.body).toBeDefined(); - const payload = JSON.parse(init?.body as string); - expect(payload).toEqual({ - sha: 'abc123', - action: 'add', - note: 'note content', - }); - return Promise.resolve({ - ok: true, - status: 201, - statusText: 'Created', - headers: { get: () => 'application/json' } as any, - json: async () => ({ - sha: 'abc123', - target_ref: 'refs/notes/commits', - new_ref_sha: 'def456', - result: { success: true, status: 'ok' }, - }), - } as any); - }); - - const createResult = await repo.createNote({ sha: 'abc123', note: 'note content' }); - expect(createResult.targetRef).toBe('refs/notes/commits'); - - mockFetch.mockImplementationOnce((url, init) => { - expect(init?.method).toBe('POST'); - const requestUrl = new URL(url as string); - expect(requestUrl.pathname.endsWith('/repos/notes')).toBe(true); - expect(init?.body).toBeDefined(); - const payload = JSON.parse(init?.body as string); - expect(payload).toEqual({ - sha: 'abc123', - action: 'append', - note: 'note append', - }); - return Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - headers: { get: () => 'application/json' } as any, - json: async () => ({ - sha: 'abc123', - target_ref: 'refs/notes/commits', - new_ref_sha: 'def789', - result: { success: true, status: 'ok' }, - }), - } 
as any); - }); - - const appendResult = await repo.appendNote({ sha: 'abc123', note: 'note append' }); - expect(appendResult.targetRef).toBe('refs/notes/commits'); - - mockFetch.mockImplementationOnce((url, init) => { - expect(init?.method).toBe('DELETE'); - const requestUrl = new URL(url as string); - expect(requestUrl.pathname.endsWith('/repos/notes')).toBe(true); - expect(init?.body).toBeDefined(); - const payload = JSON.parse(init?.body as string); - expect(payload).toEqual({ sha: 'abc123' }); - return Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - headers: { get: () => 'application/json' } as any, - json: async () => ({ - sha: 'abc123', - target_ref: 'refs/notes/commits', - new_ref_sha: 'def456', - result: { success: true, status: 'ok' }, - }), - } as any); - }); - - const deleteResult = await repo.deleteNote({ sha: 'abc123' }); - expect(deleteResult.targetRef).toBe('refs/notes/commits'); - }); - - it('passes ephemeral flag to getFileStream', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({ id: 'repo-ephemeral-file' }); - - mockFetch.mockImplementationOnce((url, init) => { - expect(init?.method).toBe('GET'); - const requestUrl = new URL(url as string); - expect(requestUrl.pathname.endsWith('/repos/file')).toBe(true); - expect(requestUrl.searchParams.get('path')).toBe('docs/readme.md'); - expect(requestUrl.searchParams.get('ref')).toBe('feature/demo'); - expect(requestUrl.searchParams.get('ephemeral')).toBe('true'); - return Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - headers: { get: () => null } as any, - json: async () => ({}), - text: async () => '', - } as any); - }); - - const response = await repo.getFileStream({ - path: 'docs/readme.md', - ref: 'feature/demo', - ephemeral: true, - }); - - expect(response.ok).toBe(true); - expect(response.status).toBe(200); - }); - - it('passes ephemeral flag to listFiles', async () => { - const store = new GitStorage({ 
name: 'v0', key }); - const repo = await store.createRepo({ id: 'repo-ephemeral-list' }); - - mockFetch.mockImplementationOnce((url, init) => { - expect(init?.method).toBe('GET'); - const requestUrl = new URL(url as string); - expect(requestUrl.pathname.endsWith('/repos/files')).toBe(true); - expect(requestUrl.searchParams.get('ref')).toBe('feature/demo'); - expect(requestUrl.searchParams.get('ephemeral')).toBe('true'); - return Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - headers: { get: () => null } as any, - json: async () => ({ - paths: ['docs/readme.md'], - ref: 'refs/namespaces/ephemeral/refs/heads/feature/demo', - }), - text: async () => '', - } as any); - }); - - const result = await repo.listFiles({ - ref: 'feature/demo', - ephemeral: true, - }); - - expect(result.paths).toEqual(['docs/readme.md']); - expect(result.ref).toBe('refs/namespaces/ephemeral/refs/heads/feature/demo'); - }); - - it('posts grep request body and parses response', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({ id: 'repo-grep' }); - - mockFetch.mockImplementationOnce((url, init) => { - expect(init?.method).toBe('POST'); - const requestUrl = new URL(url as string); - expect(requestUrl.pathname.endsWith('/repos/grep')).toBe(true); - - const body = JSON.parse(String(init?.body ?? 
'{}')); - expect(body).toEqual({ - rev: 'main', - paths: ['src/'], - query: { pattern: 'SEARCHME', case_sensitive: false }, - context: { before: 1, after: 2 }, - limits: { max_lines: 5, max_matches_per_file: 7 }, - pagination: { cursor: 'abc', limit: 3 }, - file_filters: { include_globs: ['**/*.ts'], exclude_globs: ['**/vendor/**'] }, - }); - - return Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - headers: { get: () => null } as any, - json: async () => ({ - query: { pattern: 'SEARCHME', case_sensitive: false }, - repo: { ref: 'main', commit: 'deadbeef' }, - matches: [ - { - path: 'src/a.ts', - lines: [{ line_number: 12, text: 'SEARCHME', type: 'match' }], - }, - ], - next_cursor: null, - has_more: false, - }), - text: async () => '', - } as any); - }); - - const result = await repo.grep({ - ref: 'main', - paths: ['src/'], - query: { pattern: 'SEARCHME', caseSensitive: false }, - fileFilters: { includeGlobs: ['**/*.ts'], excludeGlobs: ['**/vendor/**'] }, - context: { before: 1, after: 2 }, - limits: { maxLines: 5, maxMatchesPerFile: 7 }, - pagination: { cursor: 'abc', limit: 3 }, - }); - - expect(result.query).toEqual({ pattern: 'SEARCHME', caseSensitive: false }); - expect(result.repo).toEqual({ ref: 'main', commit: 'deadbeef' }); - expect(result.matches).toEqual([ - { - path: 'src/a.ts', - lines: [{ lineNumber: 12, text: 'SEARCHME', type: 'match' }], - }, - ]); - expect(result.nextCursor).toBeUndefined(); - expect(result.hasMore).toBe(false); - }); - - describe('createRepo', () => { - it('should return a repo with id and getRemoteURL function', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({}); - - expect(repo).toBeDefined(); - expect(repo.id).toMatch(/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/); // UUID format - expect(repo.getRemoteURL).toBeInstanceOf(Function); - - const url = await repo.getRemoteURL(); - expect(url).toMatch( - new 
RegExp(`^https:\\/\\/t:.+@v0\\.3p\\.pierre\\.rip\\/${repo.id}\\.git$`), - ); - expect(url).toContain('eyJ'); // JWT should contain base64 encoded content - }); - - it('should accept options for getRemoteURL', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({}); - - // Test with permissions and ttl - const url = await repo.getRemoteURL({ - permissions: ['git:write', 'git:read'], - ttl: 3600, - }); - expect(url).toMatch( - new RegExp(`^https:\\/\\/t:.+@v0\\.3p\\.pierre\\.rip\\/${repo.id}\\.git$`), - ); - expect(url).toContain('eyJ'); // JWT should contain base64 encoded content - }); - - it('should return ephemeral remote URL with +ephemeral suffix', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({}); - - const url = await repo.getEphemeralRemoteURL(); - expect(url).toMatch( - new RegExp(`^https:\\/\\/t:.+@v0\\.3p\\.pierre\\.rip\\/${repo.id}\\+ephemeral\\.git$`), - ); - expect(url).toContain('eyJ'); // JWT should contain base64 encoded content - expect(url).toContain('+ephemeral.git'); - }); - - it('should accept options for getEphemeralRemote', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({}); - - // Test with permissions and ttl - const url = await repo.getEphemeralRemoteURL({ - permissions: ['git:write', 'git:read'], - ttl: 3600, - }); - expect(url).toMatch( - new RegExp(`^https:\\/\\/t:.+@v0\\.3p\\.pierre\\.rip\\/${repo.id}\\+ephemeral\\.git$`), - ); - expect(url).toContain('eyJ'); // JWT should contain base64 encoded content - }); - - it('getRemoteURL and getEphemeralRemote should return different URLs', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({}); - - const defaultURL = await repo.getRemoteURL(); - const ephemeralURL = await repo.getEphemeralRemoteURL(); - - expect(defaultURL).not.toBe(ephemeralURL); - 
expect(defaultURL).toContain(`${repo.id}.git`); - expect(ephemeralURL).toContain(`${repo.id}+ephemeral.git`); - expect(ephemeralURL).not.toContain(`${repo.id}.git`); - }); - - it('should use provided id instead of generating UUID', async () => { - const store = new GitStorage({ name: 'v0', key }); - const customName = 'my-custom-repo-name'; - const repo = await store.createRepo({ id: customName }); - - expect(repo.id).toBe(customName); - - const url = await repo.getRemoteURL(); - expect(url).toContain(`/${customName}.git`); - }); - - it('should send baseRepo configuration with default defaultBranch when only baseRepo is provided', async () => { - const store = new GitStorage({ name: 'v0', key }); - const baseRepo = { - provider: 'github' as const, - owner: 'octocat', - name: 'hello-world', - defaultBranch: 'main', - }; - - await store.createRepo({ baseRepo }); - - // Check that fetch was called with baseRepo and default defaultBranch - expect(mockFetch).toHaveBeenCalledWith( - expect.any(String), - expect.objectContaining({ - method: 'POST', - body: JSON.stringify({ - base_repo: { - provider: 'github', - owner: 'octocat', - name: 'hello-world', - default_branch: 'main', - }, - default_branch: 'main', - }), - }), - ); - }); - - it('should send both baseRepo and custom defaultBranch when both are provided', async () => { - const store = new GitStorage({ name: 'v0', key }); - const baseRepo = { - provider: 'github' as const, - owner: 'octocat', - name: 'hello-world', - }; - const defaultBranch = 'develop'; - - await store.createRepo({ baseRepo, defaultBranch }); - - // Check that fetch was called with the correct body - expect(mockFetch).toHaveBeenCalledWith( - expect.any(String), - expect.objectContaining({ - method: 'POST', - body: JSON.stringify({ - base_repo: { - provider: 'github', - owner: 'octocat', - name: 'hello-world', - }, - default_branch: defaultBranch, - }), - }), - ); - }); - - it('should send fork baseRepo configuration with auth token', async () => { 
- const store = new GitStorage({ name: 'v0', key }); - const baseRepo = { - id: 'template-repo', - ref: 'develop', - }; - - mockFetch.mockResolvedValueOnce({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ repo_id: 'forked-repo', url: 'https://test.code.storage/repo.git' }), - }); - - const repo = await store.createRepo({ baseRepo }); - - expect(repo.defaultBranch).toBe('main'); - - const requestBody = JSON.parse((mockFetch.mock.calls[0][1] as RequestInit).body as string); - expect(requestBody.default_branch).toBeUndefined(); - expect(requestBody.base_repo).toEqual( - expect.objectContaining({ - provider: 'code', - owner: 'v0', - name: 'template-repo', - operation: 'fork', - ref: 'develop', - }), - ); - expect(requestBody.base_repo.auth?.token).toBeTruthy(); - - const payload = decodeJwtPayload(requestBody.base_repo.auth.token); - expect(payload.repo).toBe('template-repo'); - expect(payload.scopes).toEqual(['git:read']); - }); - - it('should default defaultBranch to "main" when not provided', async () => { - const store = new GitStorage({ name: 'v0', key }); - - await store.createRepo({}); - - // Check that fetch was called with default defaultBranch of 'main' - expect(mockFetch).toHaveBeenCalledWith( - expect.any(String), - expect.objectContaining({ - method: 'POST', - body: JSON.stringify({ - default_branch: 'main', - }), - }), - ); - }); - - it('should use custom defaultBranch when explicitly provided', async () => { - const store = new GitStorage({ name: 'v0', key }); - const customBranch = 'develop'; - - await store.createRepo({ defaultBranch: customBranch }); - - // Check that fetch was called with the custom defaultBranch - expect(mockFetch).toHaveBeenCalledWith( - expect.any(String), - expect.objectContaining({ - method: 'POST', - body: JSON.stringify({ - default_branch: customBranch, - }), - }), - ); - }); - - it('should handle repository already exists error', async () => { - const store = new GitStorage({ name: 'v0', key }); - - // 
Mock a 409 Conflict response - mockFetch.mockResolvedValue({ - ok: false, - status: 409, - statusText: 'Conflict', - }); - - await expect(store.createRepo({ id: 'existing-repo' })).rejects.toThrow( - 'Repository already exists', - ); - }); - }); - - describe('listRepos', () => { - it('should fetch repositories with org:read scope', async () => { - const store = new GitStorage({ name: 'v0', key }); - - mockFetch.mockImplementationOnce((url, init) => { - expect(init?.method).toBe('GET'); - expect(url).toBe('https://api.v0.3p.pierre.rip/api/v1/repos'); - - const headers = init?.headers as Record; - const payload = decodeJwtPayload(stripBearer(headers.Authorization)); - expect(payload.scopes).toEqual(['org:read']); - expect(payload.repo).toBe('org'); - - return Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ - repos: [ - { - repo_id: 'repo-1', - url: 'owner/repo-1', - default_branch: 'main', - created_at: '2024-01-01T00:00:00Z', - base_repo: { provider: 'github', owner: 'owner', name: 'repo-1' }, - }, - ], - next_cursor: null, - has_more: false, - }), - }); - }); - - const result = await store.listRepos(); - expect(result.repos).toHaveLength(1); - expect(result.repos[0].repoId).toBe('repo-1'); - expect(result.hasMore).toBe(false); - }); - - it('should pass cursor and limit params', async () => { - const store = new GitStorage({ name: 'v0', key }); - - mockFetch.mockImplementationOnce((url) => { - const requestUrl = new URL(url as string); - expect(requestUrl.pathname.endsWith('/api/v1/repos')).toBe(true); - expect(requestUrl.searchParams.get('cursor')).toBe('cursor-1'); - expect(requestUrl.searchParams.get('limit')).toBe('25'); - - return Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ - repos: [], - next_cursor: null, - has_more: false, - }), - }); - }); - - await store.listRepos({ cursor: 'cursor-1', limit: 25 }); - }); - }); - - describe('findOne', () => { - it('should return a repo with 
getRemoteURL function when found', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repoId = 'test-repo-id'; - const repo = await store.findOne({ id: repoId }); - - expect(repo).toBeDefined(); - expect(repo?.id).toBe(repoId); - expect(repo?.getRemoteURL).toBeInstanceOf(Function); - - const url = await repo?.getRemoteURL(); - expect(url).toMatch(/^https:\/\/t:.+@v0\.3p\.pierre\.rip\/test-repo-id\.git$/); - expect(url).toContain('eyJ'); // JWT should contain base64 encoded content - }); - - it('should handle getRemoteURL with options', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.findOne({ id: 'test-repo-id' }); - - expect(repo).toBeDefined(); - const url = await repo?.getRemoteURL({ - permissions: ['git:read'], - ttl: 7200, - }); - expect(url).toMatch(/^https:\/\/t:.+@v0\.3p\.pierre\.rip\/test-repo-id\.git$/); - expect(url).toContain('eyJ'); // JWT should contain base64 encoded content - }); - }); - - describe('deleteRepo', () => { - it('should delete a repository and return the result', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repoId = 'test-repo-to-delete'; - - mockFetch.mockResolvedValueOnce({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ - repo_id: repoId, - message: `Repository ${repoId} deletion initiated. 
Physical storage cleanup will complete asynchronously.`, - }), - } as any); - - const result = await store.deleteRepo({ id: repoId }); - - expect(result.repoId).toBe(repoId); - expect(result.message).toContain('deletion initiated'); - }); - - it('should send DELETE request with repo:write scope', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repoId = 'test-repo-delete-scope'; - - mockFetch.mockImplementationOnce((url, init) => { - expect(init?.method).toBe('DELETE'); - expect(url).toBe('https://api.v0.3p.pierre.rip/api/v1/repos/delete'); - - const headers = init?.headers as Record; - expect(headers.Authorization).toMatch(/^Bearer /); - - const payload = decodeJwtPayload(stripBearer(headers.Authorization)); - expect(payload.scopes).toEqual(['repo:write']); - expect(payload.repo).toBe(repoId); - - return Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ - repo_id: repoId, - message: 'Repository deletion initiated.', - }), - }); - }); - - await store.deleteRepo({ id: repoId }); - }); - - it('should throw error when repository not found', async () => { - const store = new GitStorage({ name: 'v0', key }); - - mockFetch.mockResolvedValueOnce({ - ok: false, - status: 404, - statusText: 'Not Found', - } as any); - - await expect(store.deleteRepo({ id: 'non-existent-repo' })).rejects.toThrow( - 'Repository not found', - ); - }); - - it('should throw error when repository already deleted', async () => { - const store = new GitStorage({ name: 'v0', key }); - - mockFetch.mockResolvedValueOnce({ - ok: false, - status: 409, - statusText: 'Conflict', - } as any); - - await expect(store.deleteRepo({ id: 'already-deleted-repo' })).rejects.toThrow( - 'Repository already deleted', - ); - }); - - it('should honor ttl option', async () => { - const store = new GitStorage({ name: 'v0', key }); - const customTTL = 300; - - mockFetch.mockImplementationOnce((_url, init) => { - const headers = init?.headers as Record; - 
const payload = decodeJwtPayload(stripBearer(headers.Authorization)); - expect(payload.exp - payload.iat).toBe(customTTL); - - return Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ - repo_id: 'test-repo', - message: 'Repository deletion initiated.', - }), - }); - }); - - await store.deleteRepo({ id: 'test-repo', ttl: customTTL }); - }); - }); - - describe('Repo createBranch', () => { - it('posts to create branch endpoint and returns parsed result', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({ id: 'repo-create-branch' }); - - mockFetch.mockImplementationOnce((url, init) => { - expect(url).toBe('https://api.v0.3p.pierre.rip/api/v1/repos/branches/create'); - - const requestInit = init as RequestInit; - expect(requestInit.method).toBe('POST'); - - const headers = requestInit.headers as Record; - expect(headers.Authorization).toMatch(/^Bearer /); - expect(headers['Content-Type']).toBe('application/json'); - - const body = JSON.parse(requestInit.body as string); - expect(body).toEqual({ - base_branch: 'main', - base_is_ephemeral: true, - target_branch: 'feature/demo', - target_is_ephemeral: true, - }); - - return Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ - message: 'branch created', - target_branch: 'feature/demo', - target_is_ephemeral: true, - commit_sha: 'abc123', - }), - } as any); - }); - - const result = await repo.createBranch({ - baseBranch: 'main', - targetBranch: 'feature/demo', - baseIsEphemeral: true, - targetIsEphemeral: true, - }); - - expect(result).toEqual({ - message: 'branch created', - targetBranch: 'feature/demo', - targetIsEphemeral: true, - commitSha: 'abc123', - }); - }); - - it('honors ttl override when creating a branch', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({ id: 'repo-create-branch-ttl' }); - - 
mockFetch.mockImplementationOnce((_url, init) => { - const requestInit = init as RequestInit; - const headers = requestInit.headers as Record; - const payload = decodeJwtPayload(stripBearer(headers.Authorization)); - expect(payload.scopes).toEqual(['git:write']); - expect(payload.exp - payload.iat).toBe(600); - - return Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ - message: 'branch created', - target_branch: 'feature/demo', - target_is_ephemeral: false, - }), - } as any); - }); - - const result = await repo.createBranch({ - baseBranch: 'main', - targetBranch: 'feature/demo', - ttl: 600, - }); - - expect(result).toEqual({ - message: 'branch created', - targetBranch: 'feature/demo', - targetIsEphemeral: false, - commitSha: undefined, - }); - }); - - it('requires both base and target branches', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({ id: 'repo-create-branch-validation' }); - - await expect( - repo.createBranch({ baseBranch: '', targetBranch: 'feature/demo' }), - ).rejects.toThrow('createBranch baseBranch is required'); - - await expect(repo.createBranch({ baseBranch: 'main', targetBranch: '' })).rejects.toThrow( - 'createBranch targetBranch is required', - ); - }); - }); - - describe('Repo getBranchDiff', () => { - it('forwards ephemeralBase flag to the API params', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({ id: 'repo-branch-diff-ephemeral-base' }); - - mockFetch.mockImplementationOnce((url) => { - const requestUrl = new URL(url as string); - expect(requestUrl.searchParams.get('branch')).toBe('refs/heads/feature/demo'); - expect(requestUrl.searchParams.get('base')).toBe('refs/heads/main'); - expect(requestUrl.searchParams.get('ephemeral_base')).toBe('true'); - - return Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ - branch: 'refs/heads/feature/demo', - 
base: 'refs/heads/main', - stats: { files: 1, additions: 1, deletions: 0, changes: 1 }, - files: [ - { - path: 'README.md', - state: 'modified', - old_path: null, - raw: '@@', - bytes: 10, - is_eof: true, - }, - ], - filtered_files: [], - }), - } as any); - }); - - const result = await repo.getBranchDiff({ - branch: 'refs/heads/feature/demo', - base: 'refs/heads/main', - ephemeralBase: true, - }); - - expect(result.branch).toBe('refs/heads/feature/demo'); - expect(result.base).toBe('refs/heads/main'); - }); - }); - - describe('Repo restoreCommit', () => { - it('should post metadata to the restore endpoint and return the response', async () => { - const store = new GitStorage({ name: 'v0', key }); - - const createRepoResponse = { - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ repo_id: 'test-repo-id', url: 'https://test.code.storage/repo.git' }), - }; - - const restoreResponse = { - commit: { - commit_sha: 'abcdef0123456789abcdef0123456789abcdef01', - tree_sha: 'fedcba9876543210fedcba9876543210fedcba98', - target_branch: 'main', - pack_bytes: 1024, - }, - result: { - branch: 'main', - old_sha: '0123456789abcdef0123456789abcdef01234567', - new_sha: '89abcdef0123456789abcdef0123456789abcdef', - success: true, - status: 'ok', - }, - }; - - mockFetch.mockResolvedValueOnce(createRepoResponse as any); - mockFetch.mockResolvedValueOnce({ - ok: true, - status: 201, - statusText: 'Created', - json: async () => restoreResponse, - } as any); - - const repo = await store.createRepo({}); - const response = await repo.restoreCommit({ - targetBranch: 'main', - expectedHeadSha: 'main', - targetCommitSha: '0123456789abcdef0123456789abcdef01234567', - commitMessage: 'Restore "feature"', - author: { - name: 'Author Name', - email: 'author@example.com', - }, - committer: { - name: 'Committer Name', - email: 'committer@example.com', - }, - }); - - expect(response).toEqual({ - commitSha: 'abcdef0123456789abcdef0123456789abcdef01', - treeSha: 
'fedcba9876543210fedcba9876543210fedcba98', - targetBranch: 'main', - packBytes: 1024, - refUpdate: { - branch: 'main', - oldSha: '0123456789abcdef0123456789abcdef01234567', - newSha: '89abcdef0123456789abcdef0123456789abcdef', - }, - }); - - const [, restoreCall] = mockFetch.mock.calls; - expect(restoreCall[0]).toBe('https://api.v0.3p.pierre.rip/api/v1/repos/restore-commit'); - const requestInit = restoreCall[1] as RequestInit; - expect(requestInit.method).toBe('POST'); - expect(requestInit.headers).toMatchObject({ - Authorization: expect.stringMatching(/^Bearer\s.+/), - 'Content-Type': 'application/json', - }); - - const parsedBody = JSON.parse(requestInit.body as string); - expect(parsedBody).toEqual({ - metadata: { - target_branch: 'main', - expected_head_sha: 'main', - target_commit_sha: '0123456789abcdef0123456789abcdef01234567', - commit_message: 'Restore "feature"', - author: { - name: 'Author Name', - email: 'author@example.com', - }, - committer: { - name: 'Committer Name', - email: 'committer@example.com', - }, - }, - }); - }); - - it('throws RefUpdateError when restore fails with a conflict response', async () => { - const store = new GitStorage({ name: 'v0', key }); - - mockFetch.mockResolvedValueOnce({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ repo_id: 'test-repo-id', url: 'https://test.code.storage/repo.git' }), - } as any); - - mockFetch.mockResolvedValueOnce({ - ok: false, - status: 409, - statusText: 'Conflict', - json: async () => ({ - commit: { - commit_sha: 'cafefeedcafefeedcafefeedcafefeedcafefeed', - tree_sha: 'feedfacefeedfacefeedfacefeedfacefeedface', - target_branch: 'main', - pack_bytes: 0, - }, - result: { - branch: 'main', - old_sha: '0123456789abcdef0123456789abcdef01234567', - new_sha: 'cafefeedcafefeedcafefeedcafefeedcafefeed', - success: false, - status: 'precondition_failed', - message: 'branch moved', - }, - }), - } as any); - - const repo = await store.createRepo({}); - - await expect( - 
repo.restoreCommit({ - targetBranch: 'main', - expectedHeadSha: 'main', - targetCommitSha: '0123456789abcdef0123456789abcdef01234567', - author: { name: 'Author Name', email: 'author@example.com' }, - }), - ).rejects.toMatchObject({ - name: 'RefUpdateError', - message: 'branch moved', - status: 'precondition_failed', - reason: 'precondition_failed', - }); - }); - - it('throws RefUpdateError when restore returns an error payload without commit data', async () => { - const store = new GitStorage({ name: 'v0', key }); - - mockFetch.mockResolvedValueOnce({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ repo_id: 'test-repo-id', url: 'https://test.code.storage/repo.git' }), - } as any); - - mockFetch.mockResolvedValueOnce({ - ok: false, - status: 412, - statusText: 'Precondition Failed', - json: async () => ({ - commit: null, - result: { - success: false, - status: 'precondition_failed', - message: 'expected head SHA mismatch', - }, - }), - } as any); - - const repo = await store.createRepo({}); - - await expect( - repo.restoreCommit({ - targetBranch: 'main', - expectedHeadSha: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', - targetCommitSha: 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb', - author: { name: 'Author', email: 'author@example.com' }, - }), - ).rejects.toMatchObject({ - name: 'RefUpdateError', - message: 'expected head SHA mismatch', - status: 'precondition_failed', - reason: 'precondition_failed', - }); - }); - - it('surfaces 404 when restore-commit endpoint is unavailable', async () => { - const store = new GitStorage({ name: 'v0', key }); - - mockFetch.mockResolvedValueOnce({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ repo_id: 'test-repo-id', url: 'https://test.code.storage/repo.git' }), - } as any); - - mockFetch.mockResolvedValueOnce({ - ok: false, - status: 404, - statusText: 'Not Found', - json: async () => ({ error: 'not found' }), - } as any); - - const repo = await store.createRepo({}); - - await expect( - 
repo.restoreCommit({ - targetBranch: 'main', - targetCommitSha: '0123456789abcdef0123456789abcdef01234567', - author: { - name: 'Author Name', - email: 'author@example.com', - }, - }), - ).rejects.toMatchObject({ - name: 'RefUpdateError', - message: expect.stringContaining('HTTP 404'), - status: expect.any(String), - }); - }); - }); - - describe('createClient', () => { - it('should create a GitStorage instance', () => { - const client = createClient({ name: 'v0', key }); - expect(client).toBeInstanceOf(GitStorage); - }); - }); - - describe('CodeStorage alias', () => { - it('should be the same class as GitStorage', () => { - expect(CodeStorage).toBe(GitStorage); - }); - - it('should create a CodeStorage instance', () => { - const store = new CodeStorage({ name: 'v0', key }); - expect(store).toBeInstanceOf(CodeStorage); - expect(store).toBeInstanceOf(GitStorage); - }); - - it('should work identically to GitStorage', async () => { - const store = new CodeStorage({ name: 'v0', key }); - const repo = await store.createRepo({ id: 'test-repo' }); - - expect(repo).toBeDefined(); - expect(repo.id).toBe('test-repo'); - - const url = await repo.getRemoteURL(); - expect(url).toMatch(/^https:\/\/t:.+@v0\.3p\.pierre\.rip\/test-repo\.git$/); - }); - }); - - describe('JWT Generation', () => { - const extractJWT = (url: string): string => { - const match = url.match(/https:\/\/t:(.+)@v0\.3p\.pierre\.rip\/.+\.git/); - if (!match) throw new Error('JWT not found in URL'); - return match[1]; - }; - - it('should generate JWT with correct payload structure', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({}); - const url = await repo.getRemoteURL(); - - const jwt = extractJWT(url); - const payload = decodeJwtPayload(jwt); - - expect(payload).toHaveProperty('iss', 'v0'); - expect(payload).toHaveProperty('sub', '@pierre/storage'); - expect(payload).toHaveProperty('repo', repo.id); - expect(payload).toHaveProperty('scopes'); - 
expect(payload).toHaveProperty('iat'); - expect(payload).toHaveProperty('exp'); - expect(payload.exp).toBeGreaterThan(payload.iat); - }); - - it('should generate JWT with default permissions and TTL', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({}); - const url = await repo.getRemoteURL(); - - const jwt = extractJWT(url); - const payload = decodeJwtPayload(jwt); - - expect(payload.scopes).toEqual(['git:write', 'git:read']); - // Default TTL is 1 year (365 * 24 * 60 * 60 = 31536000 seconds) - expect(payload.exp - payload.iat).toBe(31536000); - }); - - it('should generate JWT with custom permissions and TTL', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({}); - const customTTL = 7200; // 2 hours - const customPermissions = ['git:read' as const]; - - const url = await repo.getRemoteURL({ - permissions: customPermissions, - ttl: customTTL, - }); - - const jwt = extractJWT(url); - const payload = decodeJwtPayload(jwt); - - expect(payload.scopes).toEqual(customPermissions); - expect(payload.exp - payload.iat).toBe(customTTL); - }); - - it('respects ttl option for getRemoteURL', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({}); - const legacyTTL = 1800; - - const url = await repo.getRemoteURL({ - ttl: legacyTTL, - }); - - const jwt = extractJWT(url); - const payload = decodeJwtPayload(jwt); - - expect(payload.exp - payload.iat).toBe(legacyTTL); - }); - - it('should generate valid JWT signature that can be verified', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({}); - const url = await repo.getRemoteURL(); - - const jwt = extractJWT(url); - const importedKey = await importPKCS8(key, 'ES256'); - - // This should not throw if the signature is valid - const { payload } = await jwtVerify(jwt, importedKey); - - expect(payload.iss).toBe('v0'); 
- expect(payload.repo).toBe(repo.id); - }); - - it('should generate different JWTs for different repos', async () => { - const store = new GitStorage({ name: 'v0', key }); - - const repo1 = await store.findOne({ id: 'repo-1' }); - const repo2 = await store.findOne({ id: 'repo-2' }); - - const url1 = await repo1?.getRemoteURL(); - const url2 = await repo2?.getRemoteURL(); - - const jwt1 = extractJWT(url1!); - const jwt2 = extractJWT(url2!); - - const payload1 = decodeJwtPayload(jwt1); - const payload2 = decodeJwtPayload(jwt2); - - expect(payload1.repo).toBe('repo-1'); - expect(payload2.repo).toBe('repo-2'); - expect(jwt1).not.toBe(jwt2); - }); - - it('should include repo ID in URL path and JWT payload', async () => { - const store = new GitStorage({ name: 'v0', key }); - const customRepoId = 'my-custom-repo'; - - const repo = await store.findOne({ id: customRepoId }); - const url = await repo?.getRemoteURL(); - - // Check URL contains repo ID - expect(url).toContain(`/${customRepoId}.git`); - - // Check JWT payload contains repo ID - const jwt = extractJWT(url!); - const payload = decodeJwtPayload(jwt); - expect(payload.repo).toBe(customRepoId); - }); - }); - - describe('API Methods', () => { - describe('deprecated ttl support', () => { - it('uses deprecated ttl when listing files', async () => { - const store = new GitStorage({ name: 'v0', key }); - const legacyTTL = 900; - - mockFetch.mockImplementationOnce(() => - Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ repo_id: 'legacy-ttl', url: 'https://repo.git' }), - }), - ); - - mockFetch.mockImplementationOnce(() => - Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ paths: [], ref: 'main' }), - }), - ); - - const repo = await store.createRepo({ id: 'legacy-ttl' }); - await repo.listFiles({ ttl: legacyTTL }); - - const lastCall = mockFetch.mock.calls[mockFetch.mock.calls.length - 1]; - const init = lastCall?.[1] as RequestInit | 
undefined; - const headers = init?.headers as Record | undefined; - expect(headers?.Authorization).toBeDefined(); - const payload = decodeJwtPayload(stripBearer(headers!.Authorization)); - expect(payload.exp - payload.iat).toBe(legacyTTL); - }); - }); - }); - - describe('Code-Storage-Agent Header', () => { - it('should include Code-Storage-Agent header in createRepo API calls', async () => { - let capturedHeaders: Record | undefined; - mockFetch.mockImplementationOnce((_url, init) => { - capturedHeaders = init?.headers as Record; - return Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ - repo_id: 'test-repo-id', - url: 'https://test.code.storage/repo.git', - }), - }); - }); - - const store = new GitStorage({ name: 'v0', key }); - await store.createRepo({ id: 'test-repo' }); - - expect(capturedHeaders).toBeDefined(); - expect(capturedHeaders?.['Code-Storage-Agent']).toBeDefined(); - expect(capturedHeaders?.['Code-Storage-Agent']).toMatch(/code-storage-sdk\/\d+\.\d+\.\d+/); - }); - - it('should include Code-Storage-Agent header in listCommits API calls', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({ id: 'test-commits' }); - - let capturedHeaders: Record | undefined; - mockFetch.mockImplementationOnce((_url, init) => { - capturedHeaders = init?.headers as Record; - return Promise.resolve({ - ok: true, - status: 200, - json: async () => ({ - commits: [], - next_cursor: undefined, - has_more: false, - }), - }); - }); - - await repo.listCommits(); - - expect(capturedHeaders).toBeDefined(); - expect(capturedHeaders?.['Code-Storage-Agent']).toBeDefined(); - expect(capturedHeaders?.['Code-Storage-Agent']).toMatch(/code-storage-sdk\/\d+\.\d+\.\d+/); - }); - - it('should include Code-Storage-Agent header in createBranch API calls', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({ id: 'test-branch' }); - - let 
capturedHeaders: Record | undefined; - mockFetch.mockImplementationOnce((_url, init) => { - capturedHeaders = init?.headers as Record; - return Promise.resolve({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ - message: 'branch created', - target_branch: 'feature/test', - target_is_ephemeral: false, - }), - } as any); - }); - - await repo.createBranch({ - baseBranch: 'main', - targetBranch: 'feature/test', - }); - - expect(capturedHeaders).toBeDefined(); - expect(capturedHeaders?.['Code-Storage-Agent']).toBeDefined(); - expect(capturedHeaders?.['Code-Storage-Agent']).toMatch(/code-storage-sdk\/\d+\.\d+\.\d+/); - }); - }); - - describe('URL Generation', () => { - describe('getDefaultAPIBaseUrl', () => { - it('should insert name into API base URL', () => { - // Assuming API_BASE_URL is 'https://api.3p.pierre.rip' - const result = GitStorage.getDefaultAPIBaseUrl('v0'); - expect(result).toBe('https://api.v0.3p.pierre.rip'); - }); - - it('should work with different names', () => { - const result1 = GitStorage.getDefaultAPIBaseUrl('v1'); - expect(result1).toBe('https://api.v1.3p.pierre.rip'); - - const result2 = GitStorage.getDefaultAPIBaseUrl('prod'); - expect(result2).toBe('https://api.prod.3p.pierre.rip'); - }); - }); - - describe('getDefaultStorageBaseUrl', () => { - it('should prepend name to storage base URL', () => { - // Assuming STORAGE_BASE_URL is '3p.pierre.rip' - const result = GitStorage.getDefaultStorageBaseUrl('v0'); - expect(result).toBe('v0.3p.pierre.rip'); - }); - - it('should work with different names', () => { - const result1 = GitStorage.getDefaultStorageBaseUrl('v1'); - expect(result1).toBe('v1.3p.pierre.rip'); - - const result2 = GitStorage.getDefaultStorageBaseUrl('prod'); - expect(result2).toBe('prod.3p.pierre.rip'); - }); - }); - - describe('URL construction with default values', () => { - it('should use getDefaultAPIBaseUrl when apiBaseUrl is not provided', async () => { - const store = new GitStorage({ name: 'v0', key 
}); - await store.createRepo({ id: 'test-repo' }); - - // Check that the API calls use the default API base URL with name inserted - expect(mockFetch).toHaveBeenCalledWith( - expect.stringContaining('api.v0.3p.pierre.rip'), - expect.any(Object), - ); - }); - - it('should use getDefaultStorageBaseUrl for remote URLs when storageBaseUrl is not provided', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({ id: 'test-repo' }); - - const url = await repo.getRemoteURL(); - expect(url).toMatch(/^https:\/\/t:.+@v0\.3p\.pierre\.rip\/test-repo\.git$/); - }); - - it('should use getDefaultStorageBaseUrl for ephemeral remote URLs when storageBaseUrl is not provided', async () => { - const store = new GitStorage({ name: 'v0', key }); - const repo = await store.createRepo({ id: 'test-repo' }); - - const url = await repo.getEphemeralRemoteURL(); - expect(url).toMatch(/^https:\/\/t:.+@v0\.3p\.pierre\.rip\/test-repo\+ephemeral\.git$/); - }); - }); - - describe('URL construction with custom values', () => { - it('should use custom apiBaseUrl when provided', async () => { - const customApiBaseUrl = 'custom-api.example.com'; - const store = new GitStorage({ name: 'v0', key, apiBaseUrl: customApiBaseUrl }); - await store.createRepo({ id: 'test-repo' }); - - // Check that the API calls use the custom API base URL - expect(mockFetch).toHaveBeenCalledWith( - expect.stringContaining(customApiBaseUrl), - expect.any(Object), - ); - }); - - it('should use custom storageBaseUrl for remote URLs when provided', async () => { - const customStorageBaseUrl = 'custom-storage.example.com'; - const store = new GitStorage({ name: 'v0', key, storageBaseUrl: customStorageBaseUrl }); - const repo = await store.createRepo({ id: 'test-repo' }); - - const url = await repo.getRemoteURL(); - expect(url).toMatch(/^https:\/\/t:.+@custom-storage\.example\.com\/test-repo\.git$/); - }); - - it('should use custom storageBaseUrl for ephemeral remote URLs when 
provided', async () => { - const customStorageBaseUrl = 'custom-storage.example.com'; - const store = new GitStorage({ name: 'v0', key, storageBaseUrl: customStorageBaseUrl }); - const repo = await store.createRepo({ id: 'test-repo' }); - - const url = await repo.getEphemeralRemoteURL(); - expect(url).toMatch( - /^https:\/\/t:.+@custom-storage\.example\.com\/test-repo\+ephemeral\.git$/, - ); - }); - - it('should use custom apiBaseUrl in createCommit transport', async () => { - const customApiBaseUrl = 'custom-api.example.com'; - const store = new GitStorage({ name: 'v0', key, apiBaseUrl: customApiBaseUrl }); - const repo = await store.createRepo({ id: 'test-repo' }); - - mockFetch.mockResolvedValueOnce({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ - commit: { - commit_sha: 'abc123', - tree_sha: 'def456', - target_branch: 'main', - pack_bytes: 1024, - blob_count: 1, - }, - result: { - branch: 'main', - old_sha: 'old123', - new_sha: 'new456', - success: true, - status: 'ok', - }, - }), - } as any); - - const builder = repo.createCommit({ - targetBranch: 'main', - author: { name: 'Test', email: 'test@example.com' }, - commitMessage: 'Test commit', - }); - - await builder.addFileFromString('test.txt', 'test content').send(); - - // Verify that the fetch was called with the custom API base URL - const lastCall = mockFetch.mock.calls[mockFetch.mock.calls.length - 1]; - expect(lastCall[0]).toContain(customApiBaseUrl); - }); - - it('should use custom apiBaseUrl in createCommitFromDiff', async () => { - const customApiBaseUrl = 'custom-api.example.com'; - const store = new GitStorage({ name: 'v0', key, apiBaseUrl: customApiBaseUrl }); - const repo = await store.createRepo({ id: 'test-repo' }); - - mockFetch.mockResolvedValueOnce({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ - commit: { - commit_sha: 'abc123', - tree_sha: 'def456', - target_branch: 'main', - pack_bytes: 1024, - blob_count: 1, - }, - result: { - branch: 
'main', - old_sha: 'old123', - new_sha: 'new456', - success: true, - status: 'ok', - }, - }), - } as any); - - await repo.createCommitFromDiff({ - targetBranch: 'main', - author: { name: 'Test', email: 'test@example.com' }, - commitMessage: 'Test commit', - diff: 'diff content', - }); - - // Verify that the fetch was called with the custom API base URL - const lastCall = mockFetch.mock.calls[mockFetch.mock.calls.length - 1]; - expect(lastCall[0]).toContain(customApiBaseUrl); - }); - }); - - describe('Different name values', () => { - it('should generate correct URLs for different name values', async () => { - const names = ['v0', 'v1', 'staging', 'prod']; - - for (const name of names) { - mockFetch.mockReset(); - mockFetch.mockResolvedValue({ - ok: true, - status: 200, - statusText: 'OK', - json: async () => ({ repo_id: 'test-repo', url: 'https://test.code.storage/repo.git' }), - }); - - const store = new GitStorage({ name, key }); - const repo = await store.createRepo({ id: 'test-repo' }); - - const remoteUrl = await repo.getRemoteURL(); - expect(remoteUrl).toMatch( - new RegExp(`^https:\\/\\/t:.+@${name}\\.3p\\.pierre\\.rip\\/test-repo\\.git$`), - ); - - const ephemeralUrl = await repo.getEphemeralRemoteURL(); - expect(ephemeralUrl).toMatch( - new RegExp( - `^https:\\/\\/t:.+@${name}\\.3p\\.pierre\\.rip\\/test-repo\\+ephemeral\\.git$`, - ), - ); - - // Check API calls use the correct URL - expect(mockFetch).toHaveBeenCalledWith( - expect.stringContaining(`api.${name}.3p.pierre.rip`), - expect.any(Object), - ); - } - }); - }); - }); + beforeEach(() => { + // Reset mock before each test + mockFetch.mockReset(); + // Default successful response for createRepo + mockFetch.mockResolvedValue({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repo_id: 'test-repo-id', + url: 'https://test.code.storage/repo.git', + }), + }); + }); + describe('constructor', () => { + it('should create an instance with required options', () => { + const store = new 
GitStorage({ name: 'v0', key }); + expect(store).toBeInstanceOf(GitStorage); + }); + + it('should store the provided key', () => { + const store = new GitStorage({ name: 'v0', key }); + const config = store.getConfig(); + expect(config.key).toBe(key); + }); + + it('should throw error when key is missing', () => { + expect(() => { + // @ts-expect-error - Testing missing key + new GitStorage({}); + }).toThrow( + 'GitStorage requires a name and key. Please check your configuration and try again.' + ); + }); + + it('should throw error when name or key is null or undefined', () => { + expect(() => { + // @ts-expect-error - Testing null key + new GitStorage({ name: 'v0', key: null }); + }).toThrow( + 'GitStorage requires a name and key. Please check your configuration and try again.' + ); + + expect(() => { + // @ts-expect-error - Testing undefined key + new GitStorage({ name: 'v0', key: undefined }); + }).toThrow( + 'GitStorage requires a name and key. Please check your configuration and try again.' + ); + + expect(() => { + // @ts-expect-error - Testing null name + new GitStorage({ name: null, key: 'test-key' }); + }).toThrow( + 'GitStorage requires a name and key. Please check your configuration and try again.' + ); + + expect(() => { + // @ts-expect-error - Testing undefined name + new GitStorage({ name: undefined, key: 'test-key' }); + }).toThrow( + 'GitStorage requires a name and key. Please check your configuration and try again.' 
+ ); + }); + + it('should throw error when key is empty string', () => { + expect(() => { + new GitStorage({ name: 'v0', key: '' }); + }).toThrow('GitStorage key must be a non-empty string.'); + }); + + it('should throw error when name is empty string', () => { + expect(() => { + new GitStorage({ name: '', key: 'test-key' }); + }).toThrow('GitStorage name must be a non-empty string.'); + }); + + it('should throw error when key is only whitespace', () => { + expect(() => { + new GitStorage({ name: 'v0', key: ' ' }); + }).toThrow('GitStorage key must be a non-empty string.'); + }); + + it('should throw error when name is only whitespace', () => { + expect(() => { + new GitStorage({ name: ' ', key: 'test-key' }); + }).toThrow('GitStorage name must be a non-empty string.'); + }); + + it('should throw error when key is not a string', () => { + expect(() => { + // @ts-expect-error - Testing non-string key + new GitStorage({ name: 'v0', key: 123 }); + }).toThrow('GitStorage key must be a non-empty string.'); + + expect(() => { + // @ts-expect-error - Testing non-string key + new GitStorage({ name: 'v0', key: {} }); + }).toThrow('GitStorage key must be a non-empty string.'); + }); + + it('should throw error when name is not a string', () => { + expect(() => { + // @ts-expect-error - Testing non-string name + new GitStorage({ name: 123, key: 'test-key' }); + }).toThrow('GitStorage name must be a non-empty string.'); + + expect(() => { + // @ts-expect-error - Testing non-string name + new GitStorage({ name: {}, key: 'test-key' }); + }).toThrow('GitStorage name must be a non-empty string.'); + }); + }); + + it('parses commit dates into Date instances', async () => { + const store = new GitStorage({ name: 'v0', key }); + + const repo = await store.createRepo({ id: 'repo-dates' }); + + const rawCommits = { + commits: [ + { + sha: 'abc123', + message: 'feat: add endpoint', + author_name: 'Jane Doe', + author_email: 'jane@example.com', + committer_name: 'Jane Doe', + 
committer_email: 'jane@example.com', + date: '2024-01-15T14:32:18Z', + }, + ], + next_cursor: undefined, + has_more: false, + }; + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + json: async () => rawCommits, + }) + ); + + const commits = await repo.listCommits(); + expect(commits.commits[0].rawDate).toBe('2024-01-15T14:32:18Z'); + expect(commits.commits[0].date).toBeInstanceOf(Date); + expect(commits.commits[0].date.toISOString()).toBe( + '2024-01-15T14:32:18.000Z' + ); + }); + + it('fetches git notes with getNote', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'repo-notes-read' }); + + mockFetch.mockImplementationOnce((url, init) => { + expect(init?.method).toBe('GET'); + const requestUrl = new URL(url as string); + expect(requestUrl.pathname.endsWith('/repos/notes')).toBe(true); + expect(requestUrl.searchParams.get('sha')).toBe('abc123'); + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + sha: 'abc123', + note: 'hello notes', + ref_sha: 'def456', + }), + } as any); + }); + + const result = await repo.getNote({ sha: 'abc123' }); + expect(result).toEqual({ + sha: 'abc123', + note: 'hello notes', + refSha: 'def456', + }); + }); + + it('sends note payloads with createNote, appendNote, and deleteNote', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'repo-notes-write' }); + + mockFetch.mockImplementationOnce((url, init) => { + expect(init?.method).toBe('POST'); + const requestUrl = new URL(url as string); + expect(requestUrl.pathname.endsWith('/repos/notes')).toBe(true); + expect(init?.body).toBeDefined(); + const payload = JSON.parse(init?.body as string); + expect(payload).toEqual({ + sha: 'abc123', + action: 'add', + note: 'note content', + }); + return Promise.resolve({ + ok: true, + status: 201, + statusText: 'Created', + headers: { get: () => 
'application/json' } as any, + json: async () => ({ + sha: 'abc123', + target_ref: 'refs/notes/commits', + new_ref_sha: 'def456', + result: { success: true, status: 'ok' }, + }), + } as any); + }); + + const createResult = await repo.createNote({ + sha: 'abc123', + note: 'note content', + }); + expect(createResult.targetRef).toBe('refs/notes/commits'); + + mockFetch.mockImplementationOnce((url, init) => { + expect(init?.method).toBe('POST'); + const requestUrl = new URL(url as string); + expect(requestUrl.pathname.endsWith('/repos/notes')).toBe(true); + expect(init?.body).toBeDefined(); + const payload = JSON.parse(init?.body as string); + expect(payload).toEqual({ + sha: 'abc123', + action: 'append', + note: 'note append', + }); + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + headers: { get: () => 'application/json' } as any, + json: async () => ({ + sha: 'abc123', + target_ref: 'refs/notes/commits', + new_ref_sha: 'def789', + result: { success: true, status: 'ok' }, + }), + } as any); + }); + + const appendResult = await repo.appendNote({ + sha: 'abc123', + note: 'note append', + }); + expect(appendResult.targetRef).toBe('refs/notes/commits'); + + mockFetch.mockImplementationOnce((url, init) => { + expect(init?.method).toBe('DELETE'); + const requestUrl = new URL(url as string); + expect(requestUrl.pathname.endsWith('/repos/notes')).toBe(true); + expect(init?.body).toBeDefined(); + const payload = JSON.parse(init?.body as string); + expect(payload).toEqual({ sha: 'abc123' }); + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + headers: { get: () => 'application/json' } as any, + json: async () => ({ + sha: 'abc123', + target_ref: 'refs/notes/commits', + new_ref_sha: 'def456', + result: { success: true, status: 'ok' }, + }), + } as any); + }); + + const deleteResult = await repo.deleteNote({ sha: 'abc123' }); + expect(deleteResult.targetRef).toBe('refs/notes/commits'); + }); + + it('passes ephemeral flag to 
getFileStream', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'repo-ephemeral-file' }); + + mockFetch.mockImplementationOnce((url, init) => { + expect(init?.method).toBe('GET'); + const requestUrl = new URL(url as string); + expect(requestUrl.pathname.endsWith('/repos/file')).toBe(true); + expect(requestUrl.searchParams.get('path')).toBe('docs/readme.md'); + expect(requestUrl.searchParams.get('ref')).toBe('feature/demo'); + expect(requestUrl.searchParams.get('ephemeral')).toBe('true'); + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + headers: { get: () => null } as any, + json: async () => ({}), + text: async () => '', + } as any); + }); + + const response = await repo.getFileStream({ + path: 'docs/readme.md', + ref: 'feature/demo', + ephemeral: true, + }); + + expect(response.ok).toBe(true); + expect(response.status).toBe(200); + }); + + it('passes ephemeral flag to listFiles', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'repo-ephemeral-list' }); + + mockFetch.mockImplementationOnce((url, init) => { + expect(init?.method).toBe('GET'); + const requestUrl = new URL(url as string); + expect(requestUrl.pathname.endsWith('/repos/files')).toBe(true); + expect(requestUrl.searchParams.get('ref')).toBe('feature/demo'); + expect(requestUrl.searchParams.get('ephemeral')).toBe('true'); + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + headers: { get: () => null } as any, + json: async () => ({ + paths: ['docs/readme.md'], + ref: 'refs/namespaces/ephemeral/refs/heads/feature/demo', + }), + text: async () => '', + } as any); + }); + + const result = await repo.listFiles({ + ref: 'feature/demo', + ephemeral: true, + }); + + expect(result.paths).toEqual(['docs/readme.md']); + expect(result.ref).toBe( + 'refs/namespaces/ephemeral/refs/heads/feature/demo' + ); + }); + + it('posts grep request body 
and parses response', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'repo-grep' }); + + mockFetch.mockImplementationOnce((url, init) => { + expect(init?.method).toBe('POST'); + const requestUrl = new URL(url as string); + expect(requestUrl.pathname.endsWith('/repos/grep')).toBe(true); + + const body = JSON.parse(String(init?.body ?? '{}')); + expect(body).toEqual({ + rev: 'main', + paths: ['src/'], + query: { pattern: 'SEARCHME', case_sensitive: false }, + context: { before: 1, after: 2 }, + limits: { max_lines: 5, max_matches_per_file: 7 }, + pagination: { cursor: 'abc', limit: 3 }, + file_filters: { + include_globs: ['**/*.ts'], + exclude_globs: ['**/vendor/**'], + }, + }); + + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + headers: { get: () => null } as any, + json: async () => ({ + query: { pattern: 'SEARCHME', case_sensitive: false }, + repo: { ref: 'main', commit: 'deadbeef' }, + matches: [ + { + path: 'src/a.ts', + lines: [{ line_number: 12, text: 'SEARCHME', type: 'match' }], + }, + ], + next_cursor: null, + has_more: false, + }), + text: async () => '', + } as any); + }); + + const result = await repo.grep({ + ref: 'main', + paths: ['src/'], + query: { pattern: 'SEARCHME', caseSensitive: false }, + fileFilters: { + includeGlobs: ['**/*.ts'], + excludeGlobs: ['**/vendor/**'], + }, + context: { before: 1, after: 2 }, + limits: { maxLines: 5, maxMatchesPerFile: 7 }, + pagination: { cursor: 'abc', limit: 3 }, + }); + + expect(result.query).toEqual({ pattern: 'SEARCHME', caseSensitive: false }); + expect(result.repo).toEqual({ ref: 'main', commit: 'deadbeef' }); + expect(result.matches).toEqual([ + { + path: 'src/a.ts', + lines: [{ lineNumber: 12, text: 'SEARCHME', type: 'match' }], + }, + ]); + expect(result.nextCursor).toBeUndefined(); + expect(result.hasMore).toBe(false); + }); + + describe('createRepo', () => { + it('should return a repo with id and getRemoteURL 
function', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + + expect(repo).toBeDefined(); + expect(repo.id).toMatch( + /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/ + ); // UUID format + expect(repo.getRemoteURL).toBeInstanceOf(Function); + + const url = await repo.getRemoteURL(); + expect(url).toMatch( + new RegExp( + `^https:\\/\\/t:.+@v0\\.3p\\.pierre\\.rip\\/${repo.id}\\.git$` + ) + ); + expect(url).toContain('eyJ'); // JWT should contain base64 encoded content + }); + + it('should accept options for getRemoteURL', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + + // Test with permissions and ttl + const url = await repo.getRemoteURL({ + permissions: ['git:write', 'git:read'], + ttl: 3600, + }); + expect(url).toMatch( + new RegExp( + `^https:\\/\\/t:.+@v0\\.3p\\.pierre\\.rip\\/${repo.id}\\.git$` + ) + ); + expect(url).toContain('eyJ'); // JWT should contain base64 encoded content + }); + + it('should return ephemeral remote URL with +ephemeral suffix', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + + const url = await repo.getEphemeralRemoteURL(); + expect(url).toMatch( + new RegExp( + `^https:\\/\\/t:.+@v0\\.3p\\.pierre\\.rip\\/${repo.id}\\+ephemeral\\.git$` + ) + ); + expect(url).toContain('eyJ'); // JWT should contain base64 encoded content + expect(url).toContain('+ephemeral.git'); + }); + + it('should accept options for getEphemeralRemote', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + + // Test with permissions and ttl + const url = await repo.getEphemeralRemoteURL({ + permissions: ['git:write', 'git:read'], + ttl: 3600, + }); + expect(url).toMatch( + new RegExp( + `^https:\\/\\/t:.+@v0\\.3p\\.pierre\\.rip\\/${repo.id}\\+ephemeral\\.git$` + ) + ); + expect(url).toContain('eyJ'); // 
JWT should contain base64 encoded content + }); + + it('getRemoteURL and getEphemeralRemote should return different URLs', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + + const defaultURL = await repo.getRemoteURL(); + const ephemeralURL = await repo.getEphemeralRemoteURL(); + + expect(defaultURL).not.toBe(ephemeralURL); + expect(defaultURL).toContain(`${repo.id}.git`); + expect(ephemeralURL).toContain(`${repo.id}+ephemeral.git`); + expect(ephemeralURL).not.toContain(`${repo.id}.git`); + }); + + it('should use provided id instead of generating UUID', async () => { + const store = new GitStorage({ name: 'v0', key }); + const customName = 'my-custom-repo-name'; + const repo = await store.createRepo({ id: customName }); + + expect(repo.id).toBe(customName); + + const url = await repo.getRemoteURL(); + expect(url).toContain(`/${customName}.git`); + }); + + it('should send baseRepo configuration with default defaultBranch when only baseRepo is provided', async () => { + const store = new GitStorage({ name: 'v0', key }); + const baseRepo = { + provider: 'github' as const, + owner: 'octocat', + name: 'hello-world', + defaultBranch: 'main', + }; + + await store.createRepo({ baseRepo }); + + // Check that fetch was called with baseRepo and default defaultBranch + expect(mockFetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + method: 'POST', + body: JSON.stringify({ + base_repo: { + provider: 'github', + owner: 'octocat', + name: 'hello-world', + default_branch: 'main', + }, + default_branch: 'main', + }), + }) + ); + }); + + it('should send both baseRepo and custom defaultBranch when both are provided', async () => { + const store = new GitStorage({ name: 'v0', key }); + const baseRepo = { + provider: 'github' as const, + owner: 'octocat', + name: 'hello-world', + }; + const defaultBranch = 'develop'; + + await store.createRepo({ baseRepo, defaultBranch }); + + // Check that fetch 
was called with the correct body + expect(mockFetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + method: 'POST', + body: JSON.stringify({ + base_repo: { + provider: 'github', + owner: 'octocat', + name: 'hello-world', + }, + default_branch: defaultBranch, + }), + }) + ); + }); + + it('should send fork baseRepo configuration with auth token', async () => { + const store = new GitStorage({ name: 'v0', key }); + const baseRepo = { + id: 'template-repo', + ref: 'develop', + }; + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repo_id: 'forked-repo', + url: 'https://test.code.storage/repo.git', + }), + }); + + const repo = await store.createRepo({ baseRepo }); + + expect(repo.defaultBranch).toBe('main'); + + const requestBody = JSON.parse( + (mockFetch.mock.calls[0][1] as RequestInit).body as string + ); + expect(requestBody.default_branch).toBeUndefined(); + expect(requestBody.base_repo).toEqual( + expect.objectContaining({ + provider: 'code', + owner: 'v0', + name: 'template-repo', + operation: 'fork', + ref: 'develop', + }) + ); + expect(requestBody.base_repo.auth?.token).toBeTruthy(); + + const payload = decodeJwtPayload(requestBody.base_repo.auth.token); + expect(payload.repo).toBe('template-repo'); + expect(payload.scopes).toEqual(['git:read']); + }); + + it('should default defaultBranch to "main" when not provided', async () => { + const store = new GitStorage({ name: 'v0', key }); + + await store.createRepo({}); + + // Check that fetch was called with default defaultBranch of 'main' + expect(mockFetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + method: 'POST', + body: JSON.stringify({ + default_branch: 'main', + }), + }) + ); + }); + + it('should use custom defaultBranch when explicitly provided', async () => { + const store = new GitStorage({ name: 'v0', key }); + const customBranch = 'develop'; + + await store.createRepo({ defaultBranch: 
customBranch }); + + // Check that fetch was called with the custom defaultBranch + expect(mockFetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + method: 'POST', + body: JSON.stringify({ + default_branch: customBranch, + }), + }) + ); + }); + + it('should handle repository already exists error', async () => { + const store = new GitStorage({ name: 'v0', key }); + + // Mock a 409 Conflict response + mockFetch.mockResolvedValue({ + ok: false, + status: 409, + statusText: 'Conflict', + }); + + await expect(store.createRepo({ id: 'existing-repo' })).rejects.toThrow( + 'Repository already exists' + ); + }); + }); + + describe('listRepos', () => { + it('should fetch repositories with org:read scope', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce((url, init) => { + expect(init?.method).toBe('GET'); + expect(url).toBe('https://api.v0.3p.pierre.rip/api/v1/repos'); + + const headers = init?.headers as Record; + const payload = decodeJwtPayload(stripBearer(headers.Authorization)); + expect(payload.scopes).toEqual(['org:read']); + expect(payload.repo).toBe('org'); + + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repos: [ + { + repo_id: 'repo-1', + url: 'owner/repo-1', + default_branch: 'main', + created_at: '2024-01-01T00:00:00Z', + base_repo: { + provider: 'github', + owner: 'owner', + name: 'repo-1', + }, + }, + ], + next_cursor: null, + has_more: false, + }), + }); + }); + + const result = await store.listRepos(); + expect(result.repos).toHaveLength(1); + expect(result.repos[0].repoId).toBe('repo-1'); + expect(result.hasMore).toBe(false); + }); + + it('should pass cursor and limit params', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockImplementationOnce((url) => { + const requestUrl = new URL(url as string); + expect(requestUrl.pathname.endsWith('/api/v1/repos')).toBe(true); + 
expect(requestUrl.searchParams.get('cursor')).toBe('cursor-1'); + expect(requestUrl.searchParams.get('limit')).toBe('25'); + + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repos: [], + next_cursor: null, + has_more: false, + }), + }); + }); + + await store.listRepos({ cursor: 'cursor-1', limit: 25 }); + }); + }); + + describe('findOne', () => { + it('should return a repo with getRemoteURL function when found', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repoId = 'test-repo-id'; + const repo = await store.findOne({ id: repoId }); + + expect(repo).toBeDefined(); + expect(repo?.id).toBe(repoId); + expect(repo?.getRemoteURL).toBeInstanceOf(Function); + + const url = await repo?.getRemoteURL(); + expect(url).toMatch( + /^https:\/\/t:.+@v0\.3p\.pierre\.rip\/test-repo-id\.git$/ + ); + expect(url).toContain('eyJ'); // JWT should contain base64 encoded content + }); + + it('should handle getRemoteURL with options', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.findOne({ id: 'test-repo-id' }); + + expect(repo).toBeDefined(); + const url = await repo?.getRemoteURL({ + permissions: ['git:read'], + ttl: 7200, + }); + expect(url).toMatch( + /^https:\/\/t:.+@v0\.3p\.pierre\.rip\/test-repo-id\.git$/ + ); + expect(url).toContain('eyJ'); // JWT should contain base64 encoded content + }); + }); + + describe('deleteRepo', () => { + it('should delete a repository and return the result', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repoId = 'test-repo-to-delete'; + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repo_id: repoId, + message: `Repository ${repoId} deletion initiated. 
Physical storage cleanup will complete asynchronously.`, + }), + } as any); + + const result = await store.deleteRepo({ id: repoId }); + + expect(result.repoId).toBe(repoId); + expect(result.message).toContain('deletion initiated'); + }); + + it('should send DELETE request with repo:write scope', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repoId = 'test-repo-delete-scope'; + + mockFetch.mockImplementationOnce((url, init) => { + expect(init?.method).toBe('DELETE'); + expect(url).toBe('https://api.v0.3p.pierre.rip/api/v1/repos/delete'); + + const headers = init?.headers as Record; + expect(headers.Authorization).toMatch(/^Bearer /); + + const payload = decodeJwtPayload(stripBearer(headers.Authorization)); + expect(payload.scopes).toEqual(['repo:write']); + expect(payload.repo).toBe(repoId); + + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repo_id: repoId, + message: 'Repository deletion initiated.', + }), + }); + }); + + await store.deleteRepo({ id: repoId }); + }); + + it('should throw error when repository not found', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockResolvedValueOnce({ + ok: false, + status: 404, + statusText: 'Not Found', + } as any); + + await expect( + store.deleteRepo({ id: 'non-existent-repo' }) + ).rejects.toThrow('Repository not found'); + }); + + it('should throw error when repository already deleted', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockResolvedValueOnce({ + ok: false, + status: 409, + statusText: 'Conflict', + } as any); + + await expect( + store.deleteRepo({ id: 'already-deleted-repo' }) + ).rejects.toThrow('Repository already deleted'); + }); + + it('should honor ttl option', async () => { + const store = new GitStorage({ name: 'v0', key }); + const customTTL = 300; + + mockFetch.mockImplementationOnce((_url, init) => { + const headers = init?.headers as Record; + const 
payload = decodeJwtPayload(stripBearer(headers.Authorization)); + expect(payload.exp - payload.iat).toBe(customTTL); + + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repo_id: 'test-repo', + message: 'Repository deletion initiated.', + }), + }); + }); + + await store.deleteRepo({ id: 'test-repo', ttl: customTTL }); + }); + }); + + describe('Repo createBranch', () => { + it('posts to create branch endpoint and returns parsed result', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'repo-create-branch' }); + + mockFetch.mockImplementationOnce((url, init) => { + expect(url).toBe( + 'https://api.v0.3p.pierre.rip/api/v1/repos/branches/create' + ); + + const requestInit = init as RequestInit; + expect(requestInit.method).toBe('POST'); + + const headers = requestInit.headers as Record; + expect(headers.Authorization).toMatch(/^Bearer /); + expect(headers['Content-Type']).toBe('application/json'); + + const body = JSON.parse(requestInit.body as string); + expect(body).toEqual({ + base_branch: 'main', + base_is_ephemeral: true, + target_branch: 'feature/demo', + target_is_ephemeral: true, + }); + + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + message: 'branch created', + target_branch: 'feature/demo', + target_is_ephemeral: true, + commit_sha: 'abc123', + }), + } as any); + }); + + const result = await repo.createBranch({ + baseBranch: 'main', + targetBranch: 'feature/demo', + baseIsEphemeral: true, + targetIsEphemeral: true, + }); + + expect(result).toEqual({ + message: 'branch created', + targetBranch: 'feature/demo', + targetIsEphemeral: true, + commitSha: 'abc123', + }); + }); + + it('honors ttl override when creating a branch', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'repo-create-branch-ttl' }); + + 
mockFetch.mockImplementationOnce((_url, init) => { + const requestInit = init as RequestInit; + const headers = requestInit.headers as Record; + const payload = decodeJwtPayload(stripBearer(headers.Authorization)); + expect(payload.scopes).toEqual(['git:write']); + expect(payload.exp - payload.iat).toBe(600); + + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + message: 'branch created', + target_branch: 'feature/demo', + target_is_ephemeral: false, + }), + } as any); + }); + + const result = await repo.createBranch({ + baseBranch: 'main', + targetBranch: 'feature/demo', + ttl: 600, + }); + + expect(result).toEqual({ + message: 'branch created', + targetBranch: 'feature/demo', + targetIsEphemeral: false, + commitSha: undefined, + }); + }); + + it('requires both base and target branches', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ + id: 'repo-create-branch-validation', + }); + + await expect( + repo.createBranch({ baseBranch: '', targetBranch: 'feature/demo' }) + ).rejects.toThrow('createBranch baseBranch is required'); + + await expect( + repo.createBranch({ baseBranch: 'main', targetBranch: '' }) + ).rejects.toThrow('createBranch targetBranch is required'); + }); + }); + + describe('Repo getBranchDiff', () => { + it('forwards ephemeralBase flag to the API params', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ + id: 'repo-branch-diff-ephemeral-base', + }); + + mockFetch.mockImplementationOnce((url) => { + const requestUrl = new URL(url as string); + expect(requestUrl.searchParams.get('branch')).toBe( + 'refs/heads/feature/demo' + ); + expect(requestUrl.searchParams.get('base')).toBe('refs/heads/main'); + expect(requestUrl.searchParams.get('ephemeral_base')).toBe('true'); + + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + branch: 
'refs/heads/feature/demo', + base: 'refs/heads/main', + stats: { files: 1, additions: 1, deletions: 0, changes: 1 }, + files: [ + { + path: 'README.md', + state: 'modified', + old_path: null, + raw: '@@', + bytes: 10, + is_eof: true, + }, + ], + filtered_files: [], + }), + } as any); + }); + + const result = await repo.getBranchDiff({ + branch: 'refs/heads/feature/demo', + base: 'refs/heads/main', + ephemeralBase: true, + }); + + expect(result.branch).toBe('refs/heads/feature/demo'); + expect(result.base).toBe('refs/heads/main'); + }); + }); + + describe('Repo restoreCommit', () => { + it('should post metadata to the restore endpoint and return the response', async () => { + const store = new GitStorage({ name: 'v0', key }); + + const createRepoResponse = { + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repo_id: 'test-repo-id', + url: 'https://test.code.storage/repo.git', + }), + }; + + const restoreResponse = { + commit: { + commit_sha: 'abcdef0123456789abcdef0123456789abcdef01', + tree_sha: 'fedcba9876543210fedcba9876543210fedcba98', + target_branch: 'main', + pack_bytes: 1024, + }, + result: { + branch: 'main', + old_sha: '0123456789abcdef0123456789abcdef01234567', + new_sha: '89abcdef0123456789abcdef0123456789abcdef', + success: true, + status: 'ok', + }, + }; + + mockFetch.mockResolvedValueOnce(createRepoResponse as any); + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 201, + statusText: 'Created', + json: async () => restoreResponse, + } as any); + + const repo = await store.createRepo({}); + const response = await repo.restoreCommit({ + targetBranch: 'main', + expectedHeadSha: 'main', + targetCommitSha: '0123456789abcdef0123456789abcdef01234567', + commitMessage: 'Restore "feature"', + author: { + name: 'Author Name', + email: 'author@example.com', + }, + committer: { + name: 'Committer Name', + email: 'committer@example.com', + }, + }); + + expect(response).toEqual({ + commitSha: 'abcdef0123456789abcdef0123456789abcdef01', 
+ treeSha: 'fedcba9876543210fedcba9876543210fedcba98', + targetBranch: 'main', + packBytes: 1024, + refUpdate: { + branch: 'main', + oldSha: '0123456789abcdef0123456789abcdef01234567', + newSha: '89abcdef0123456789abcdef0123456789abcdef', + }, + }); + + const [, restoreCall] = mockFetch.mock.calls; + expect(restoreCall[0]).toBe( + 'https://api.v0.3p.pierre.rip/api/v1/repos/restore-commit' + ); + const requestInit = restoreCall[1] as RequestInit; + expect(requestInit.method).toBe('POST'); + expect(requestInit.headers).toMatchObject({ + Authorization: expect.stringMatching(/^Bearer\s.+/), + 'Content-Type': 'application/json', + }); + + const parsedBody = JSON.parse(requestInit.body as string); + expect(parsedBody).toEqual({ + metadata: { + target_branch: 'main', + expected_head_sha: 'main', + target_commit_sha: '0123456789abcdef0123456789abcdef01234567', + commit_message: 'Restore "feature"', + author: { + name: 'Author Name', + email: 'author@example.com', + }, + committer: { + name: 'Committer Name', + email: 'committer@example.com', + }, + }, + }); + }); + + it('throws RefUpdateError when restore fails with a conflict response', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repo_id: 'test-repo-id', + url: 'https://test.code.storage/repo.git', + }), + } as any); + + mockFetch.mockResolvedValueOnce({ + ok: false, + status: 409, + statusText: 'Conflict', + json: async () => ({ + commit: { + commit_sha: 'cafefeedcafefeedcafefeedcafefeedcafefeed', + tree_sha: 'feedfacefeedfacefeedfacefeedfacefeedface', + target_branch: 'main', + pack_bytes: 0, + }, + result: { + branch: 'main', + old_sha: '0123456789abcdef0123456789abcdef01234567', + new_sha: 'cafefeedcafefeedcafefeedcafefeedcafefeed', + success: false, + status: 'precondition_failed', + message: 'branch moved', + }, + }), + } as any); + + const repo = await store.createRepo({}); + + await 
expect( + repo.restoreCommit({ + targetBranch: 'main', + expectedHeadSha: 'main', + targetCommitSha: '0123456789abcdef0123456789abcdef01234567', + author: { name: 'Author Name', email: 'author@example.com' }, + }) + ).rejects.toMatchObject({ + name: 'RefUpdateError', + message: 'branch moved', + status: 'precondition_failed', + reason: 'precondition_failed', + }); + }); + + it('throws RefUpdateError when restore returns an error payload without commit data', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repo_id: 'test-repo-id', + url: 'https://test.code.storage/repo.git', + }), + } as any); + + mockFetch.mockResolvedValueOnce({ + ok: false, + status: 412, + statusText: 'Precondition Failed', + json: async () => ({ + commit: null, + result: { + success: false, + status: 'precondition_failed', + message: 'expected head SHA mismatch', + }, + }), + } as any); + + const repo = await store.createRepo({}); + + await expect( + repo.restoreCommit({ + targetBranch: 'main', + expectedHeadSha: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', + targetCommitSha: 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb', + author: { name: 'Author', email: 'author@example.com' }, + }) + ).rejects.toMatchObject({ + name: 'RefUpdateError', + message: 'expected head SHA mismatch', + status: 'precondition_failed', + reason: 'precondition_failed', + }); + }); + + it('surfaces 404 when restore-commit endpoint is unavailable', async () => { + const store = new GitStorage({ name: 'v0', key }); + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repo_id: 'test-repo-id', + url: 'https://test.code.storage/repo.git', + }), + } as any); + + mockFetch.mockResolvedValueOnce({ + ok: false, + status: 404, + statusText: 'Not Found', + json: async () => ({ error: 'not found' }), + } as any); + + const repo = await 
store.createRepo({}); + + await expect( + repo.restoreCommit({ + targetBranch: 'main', + targetCommitSha: '0123456789abcdef0123456789abcdef01234567', + author: { + name: 'Author Name', + email: 'author@example.com', + }, + }) + ).rejects.toMatchObject({ + name: 'RefUpdateError', + message: expect.stringContaining('HTTP 404'), + status: expect.any(String), + }); + }); + }); + + describe('createClient', () => { + it('should create a GitStorage instance', () => { + const client = createClient({ name: 'v0', key }); + expect(client).toBeInstanceOf(GitStorage); + }); + }); + + describe('CodeStorage alias', () => { + it('should be the same class as GitStorage', () => { + expect(CodeStorage).toBe(GitStorage); + }); + + it('should create a CodeStorage instance', () => { + const store = new CodeStorage({ name: 'v0', key }); + expect(store).toBeInstanceOf(CodeStorage); + expect(store).toBeInstanceOf(GitStorage); + }); + + it('should work identically to GitStorage', async () => { + const store = new CodeStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'test-repo' }); + + expect(repo).toBeDefined(); + expect(repo.id).toBe('test-repo'); + + const url = await repo.getRemoteURL(); + expect(url).toMatch( + /^https:\/\/t:.+@v0\.3p\.pierre\.rip\/test-repo\.git$/ + ); + }); + }); + + describe('JWT Generation', () => { + const extractJWT = (url: string): string => { + const match = url.match(/https:\/\/t:(.+)@v0\.3p\.pierre\.rip\/.+\.git/); + if (!match) throw new Error('JWT not found in URL'); + return match[1]; + }; + + it('should generate JWT with correct payload structure', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + const url = await repo.getRemoteURL(); + + const jwt = extractJWT(url); + const payload = decodeJwtPayload(jwt); + + expect(payload).toHaveProperty('iss', 'v0'); + expect(payload).toHaveProperty('sub', '@pierre/storage'); + expect(payload).toHaveProperty('repo', repo.id); + 
expect(payload).toHaveProperty('scopes'); + expect(payload).toHaveProperty('iat'); + expect(payload).toHaveProperty('exp'); + expect(payload.exp).toBeGreaterThan(payload.iat); + }); + + it('should generate JWT with default permissions and TTL', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + const url = await repo.getRemoteURL(); + + const jwt = extractJWT(url); + const payload = decodeJwtPayload(jwt); + + expect(payload.scopes).toEqual(['git:write', 'git:read']); + // Default TTL is 1 year (365 * 24 * 60 * 60 = 31536000 seconds) + expect(payload.exp - payload.iat).toBe(31536000); + }); + + it('should generate JWT with custom permissions and TTL', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + const customTTL = 7200; // 2 hours + const customPermissions = ['git:read' as const]; + + const url = await repo.getRemoteURL({ + permissions: customPermissions, + ttl: customTTL, + }); + + const jwt = extractJWT(url); + const payload = decodeJwtPayload(jwt); + + expect(payload.scopes).toEqual(customPermissions); + expect(payload.exp - payload.iat).toBe(customTTL); + }); + + it('respects ttl option for getRemoteURL', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + const legacyTTL = 1800; + + const url = await repo.getRemoteURL({ + ttl: legacyTTL, + }); + + const jwt = extractJWT(url); + const payload = decodeJwtPayload(jwt); + + expect(payload.exp - payload.iat).toBe(legacyTTL); + }); + + it('should generate valid JWT signature that can be verified', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({}); + const url = await repo.getRemoteURL(); + + const jwt = extractJWT(url); + const importedKey = await importPKCS8(key, 'ES256'); + + // This should not throw if the signature is valid + const { payload } = await jwtVerify(jwt, 
importedKey); + + expect(payload.iss).toBe('v0'); + expect(payload.repo).toBe(repo.id); + }); + + it('should generate different JWTs for different repos', async () => { + const store = new GitStorage({ name: 'v0', key }); + + const repo1 = await store.findOne({ id: 'repo-1' }); + const repo2 = await store.findOne({ id: 'repo-2' }); + + const url1 = await repo1?.getRemoteURL(); + const url2 = await repo2?.getRemoteURL(); + + const jwt1 = extractJWT(url1!); + const jwt2 = extractJWT(url2!); + + const payload1 = decodeJwtPayload(jwt1); + const payload2 = decodeJwtPayload(jwt2); + + expect(payload1.repo).toBe('repo-1'); + expect(payload2.repo).toBe('repo-2'); + expect(jwt1).not.toBe(jwt2); + }); + + it('should include repo ID in URL path and JWT payload', async () => { + const store = new GitStorage({ name: 'v0', key }); + const customRepoId = 'my-custom-repo'; + + const repo = await store.findOne({ id: customRepoId }); + const url = await repo?.getRemoteURL(); + + // Check URL contains repo ID + expect(url).toContain(`/${customRepoId}.git`); + + // Check JWT payload contains repo ID + const jwt = extractJWT(url!); + const payload = decodeJwtPayload(jwt); + expect(payload.repo).toBe(customRepoId); + }); + }); + + describe('API Methods', () => { + describe('deprecated ttl support', () => { + it('uses deprecated ttl when listing files', async () => { + const store = new GitStorage({ name: 'v0', key }); + const legacyTTL = 900; + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repo_id: 'legacy-ttl', + url: 'https://repo.git', + }), + }) + ); + + mockFetch.mockImplementationOnce(() => + Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ paths: [], ref: 'main' }), + }) + ); + + const repo = await store.createRepo({ id: 'legacy-ttl' }); + await repo.listFiles({ ttl: legacyTTL }); + + const lastCall = mockFetch.mock.calls[mockFetch.mock.calls.length - 1]; 
+ const init = lastCall?.[1] as RequestInit | undefined; + const headers = init?.headers as Record | undefined; + expect(headers?.Authorization).toBeDefined(); + const payload = decodeJwtPayload(stripBearer(headers!.Authorization)); + expect(payload.exp - payload.iat).toBe(legacyTTL); + }); + }); + }); + + describe('Code-Storage-Agent Header', () => { + it('should include Code-Storage-Agent header in createRepo API calls', async () => { + let capturedHeaders: Record | undefined; + mockFetch.mockImplementationOnce((_url, init) => { + capturedHeaders = init?.headers as Record; + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repo_id: 'test-repo-id', + url: 'https://test.code.storage/repo.git', + }), + }); + }); + + const store = new GitStorage({ name: 'v0', key }); + await store.createRepo({ id: 'test-repo' }); + + expect(capturedHeaders).toBeDefined(); + expect(capturedHeaders?.['Code-Storage-Agent']).toBeDefined(); + expect(capturedHeaders?.['Code-Storage-Agent']).toMatch( + /code-storage-sdk\/\d+\.\d+\.\d+/ + ); + }); + + it('should include Code-Storage-Agent header in listCommits API calls', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'test-commits' }); + + let capturedHeaders: Record | undefined; + mockFetch.mockImplementationOnce((_url, init) => { + capturedHeaders = init?.headers as Record; + return Promise.resolve({ + ok: true, + status: 200, + json: async () => ({ + commits: [], + next_cursor: undefined, + has_more: false, + }), + }); + }); + + await repo.listCommits(); + + expect(capturedHeaders).toBeDefined(); + expect(capturedHeaders?.['Code-Storage-Agent']).toBeDefined(); + expect(capturedHeaders?.['Code-Storage-Agent']).toMatch( + /code-storage-sdk\/\d+\.\d+\.\d+/ + ); + }); + + it('should include Code-Storage-Agent header in createBranch API calls', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await 
store.createRepo({ id: 'test-branch' }); + + let capturedHeaders: Record | undefined; + mockFetch.mockImplementationOnce((_url, init) => { + capturedHeaders = init?.headers as Record; + return Promise.resolve({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + message: 'branch created', + target_branch: 'feature/test', + target_is_ephemeral: false, + }), + } as any); + }); + + await repo.createBranch({ + baseBranch: 'main', + targetBranch: 'feature/test', + }); + + expect(capturedHeaders).toBeDefined(); + expect(capturedHeaders?.['Code-Storage-Agent']).toBeDefined(); + expect(capturedHeaders?.['Code-Storage-Agent']).toMatch( + /code-storage-sdk\/\d+\.\d+\.\d+/ + ); + }); + }); + + describe('URL Generation', () => { + describe('getDefaultAPIBaseUrl', () => { + it('should insert name into API base URL', () => { + // Assuming API_BASE_URL is 'https://api.3p.pierre.rip' + const result = GitStorage.getDefaultAPIBaseUrl('v0'); + expect(result).toBe('https://api.v0.3p.pierre.rip'); + }); + + it('should work with different names', () => { + const result1 = GitStorage.getDefaultAPIBaseUrl('v1'); + expect(result1).toBe('https://api.v1.3p.pierre.rip'); + + const result2 = GitStorage.getDefaultAPIBaseUrl('prod'); + expect(result2).toBe('https://api.prod.3p.pierre.rip'); + }); + }); + + describe('getDefaultStorageBaseUrl', () => { + it('should prepend name to storage base URL', () => { + // Assuming STORAGE_BASE_URL is '3p.pierre.rip' + const result = GitStorage.getDefaultStorageBaseUrl('v0'); + expect(result).toBe('v0.3p.pierre.rip'); + }); + + it('should work with different names', () => { + const result1 = GitStorage.getDefaultStorageBaseUrl('v1'); + expect(result1).toBe('v1.3p.pierre.rip'); + + const result2 = GitStorage.getDefaultStorageBaseUrl('prod'); + expect(result2).toBe('prod.3p.pierre.rip'); + }); + }); + + describe('URL construction with default values', () => { + it('should use getDefaultAPIBaseUrl when apiBaseUrl is not provided', async () 
=> { + const store = new GitStorage({ name: 'v0', key }); + await store.createRepo({ id: 'test-repo' }); + + // Check that the API calls use the default API base URL with name inserted + expect(mockFetch).toHaveBeenCalledWith( + expect.stringContaining('api.v0.3p.pierre.rip'), + expect.any(Object) + ); + }); + + it('should use getDefaultStorageBaseUrl for remote URLs when storageBaseUrl is not provided', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'test-repo' }); + + const url = await repo.getRemoteURL(); + expect(url).toMatch( + /^https:\/\/t:.+@v0\.3p\.pierre\.rip\/test-repo\.git$/ + ); + }); + + it('should use getDefaultStorageBaseUrl for ephemeral remote URLs when storageBaseUrl is not provided', async () => { + const store = new GitStorage({ name: 'v0', key }); + const repo = await store.createRepo({ id: 'test-repo' }); + + const url = await repo.getEphemeralRemoteURL(); + expect(url).toMatch( + /^https:\/\/t:.+@v0\.3p\.pierre\.rip\/test-repo\+ephemeral\.git$/ + ); + }); + }); + + describe('URL construction with custom values', () => { + it('should use custom apiBaseUrl when provided', async () => { + const customApiBaseUrl = 'custom-api.example.com'; + const store = new GitStorage({ + name: 'v0', + key, + apiBaseUrl: customApiBaseUrl, + }); + await store.createRepo({ id: 'test-repo' }); + + // Check that the API calls use the custom API base URL + expect(mockFetch).toHaveBeenCalledWith( + expect.stringContaining(customApiBaseUrl), + expect.any(Object) + ); + }); + + it('should use custom storageBaseUrl for remote URLs when provided', async () => { + const customStorageBaseUrl = 'custom-storage.example.com'; + const store = new GitStorage({ + name: 'v0', + key, + storageBaseUrl: customStorageBaseUrl, + }); + const repo = await store.createRepo({ id: 'test-repo' }); + + const url = await repo.getRemoteURL(); + expect(url).toMatch( + /^https:\/\/t:.+@custom-storage\.example\.com\/test-repo\.git$/ 
+ ); + }); + + it('should use custom storageBaseUrl for ephemeral remote URLs when provided', async () => { + const customStorageBaseUrl = 'custom-storage.example.com'; + const store = new GitStorage({ + name: 'v0', + key, + storageBaseUrl: customStorageBaseUrl, + }); + const repo = await store.createRepo({ id: 'test-repo' }); + + const url = await repo.getEphemeralRemoteURL(); + expect(url).toMatch( + /^https:\/\/t:.+@custom-storage\.example\.com\/test-repo\+ephemeral\.git$/ + ); + }); + + it('should use custom apiBaseUrl in createCommit transport', async () => { + const customApiBaseUrl = 'custom-api.example.com'; + const store = new GitStorage({ + name: 'v0', + key, + apiBaseUrl: customApiBaseUrl, + }); + const repo = await store.createRepo({ id: 'test-repo' }); + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + commit: { + commit_sha: 'abc123', + tree_sha: 'def456', + target_branch: 'main', + pack_bytes: 1024, + blob_count: 1, + }, + result: { + branch: 'main', + old_sha: 'old123', + new_sha: 'new456', + success: true, + status: 'ok', + }, + }), + } as any); + + const builder = repo.createCommit({ + targetBranch: 'main', + author: { name: 'Test', email: 'test@example.com' }, + commitMessage: 'Test commit', + }); + + await builder.addFileFromString('test.txt', 'test content').send(); + + // Verify that the fetch was called with the custom API base URL + const lastCall = mockFetch.mock.calls[mockFetch.mock.calls.length - 1]; + expect(lastCall[0]).toContain(customApiBaseUrl); + }); + + it('should use custom apiBaseUrl in createCommitFromDiff', async () => { + const customApiBaseUrl = 'custom-api.example.com'; + const store = new GitStorage({ + name: 'v0', + key, + apiBaseUrl: customApiBaseUrl, + }); + const repo = await store.createRepo({ id: 'test-repo' }); + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + commit: { + commit_sha: 'abc123', + 
tree_sha: 'def456', + target_branch: 'main', + pack_bytes: 1024, + blob_count: 1, + }, + result: { + branch: 'main', + old_sha: 'old123', + new_sha: 'new456', + success: true, + status: 'ok', + }, + }), + } as any); + + await repo.createCommitFromDiff({ + targetBranch: 'main', + author: { name: 'Test', email: 'test@example.com' }, + commitMessage: 'Test commit', + diff: 'diff content', + }); + + // Verify that the fetch was called with the custom API base URL + const lastCall = mockFetch.mock.calls[mockFetch.mock.calls.length - 1]; + expect(lastCall[0]).toContain(customApiBaseUrl); + }); + }); + + describe('Different name values', () => { + it('should generate correct URLs for different name values', async () => { + const names = ['v0', 'v1', 'staging', 'prod']; + + for (const name of names) { + mockFetch.mockReset(); + mockFetch.mockResolvedValue({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + repo_id: 'test-repo', + url: 'https://test.code.storage/repo.git', + }), + }); + + const store = new GitStorage({ name, key }); + const repo = await store.createRepo({ id: 'test-repo' }); + + const remoteUrl = await repo.getRemoteURL(); + expect(remoteUrl).toMatch( + new RegExp( + `^https:\\/\\/t:.+@${name}\\.3p\\.pierre\\.rip\\/test-repo\\.git$` + ) + ); + + const ephemeralUrl = await repo.getEphemeralRemoteURL(); + expect(ephemeralUrl).toMatch( + new RegExp( + `^https:\\/\\/t:.+@${name}\\.3p\\.pierre\\.rip\\/test-repo\\+ephemeral\\.git$` + ) + ); + + // Check API calls use the correct URL + expect(mockFetch).toHaveBeenCalledWith( + expect.stringContaining(`api.${name}.3p.pierre.rip`), + expect.any(Object) + ); + } + }); + }); + }); }); diff --git a/packages/git-storage-sdk-node/tests/version.test.ts b/packages/git-storage-sdk-node/tests/version.test.ts index b81825a23..0e23a2113 100644 --- a/packages/git-storage-sdk-node/tests/version.test.ts +++ b/packages/git-storage-sdk-node/tests/version.test.ts @@ -1,64 +1,65 @@ import { describe, expect, it 
} from 'vitest'; + import packageJson from '../package.json'; -import { getUserAgent, PACKAGE_NAME, PACKAGE_VERSION } from '../src/version'; +import { PACKAGE_NAME, PACKAGE_VERSION, getUserAgent } from '../src/version'; describe('version', () => { - describe('PACKAGE_NAME', () => { - it('should export the correct package name', () => { - expect(PACKAGE_NAME).toBe('code-storage-sdk'); - }); - }); + describe('PACKAGE_NAME', () => { + it('should export the correct package name', () => { + expect(PACKAGE_NAME).toBe('code-storage-sdk'); + }); + }); - describe('PACKAGE_VERSION', () => { - it('should export the correct package version', () => { - expect(PACKAGE_VERSION).toBe(packageJson.version); - }); + describe('PACKAGE_VERSION', () => { + it('should export the correct package version', () => { + expect(PACKAGE_VERSION).toBe(packageJson.version); + }); - it('should follow semantic versioning format', () => { - // Check if version follows semver pattern (e.g., 0.3.0, 1.0.0, 1.2.3-beta.1) - const semverPattern = /^\d+\.\d+\.\d+(-[a-zA-Z0-9.-]+)?$/; - expect(PACKAGE_VERSION).toMatch(semverPattern); - }); - }); + it('should follow semantic versioning format', () => { + // Check if version follows semver pattern (e.g., 0.3.0, 1.0.0, 1.2.3-beta.1) + const semverPattern = /^\d+\.\d+\.\d+(-[a-zA-Z0-9.-]+)?$/; + expect(PACKAGE_VERSION).toMatch(semverPattern); + }); + }); - describe('getUserAgent', () => { - it('should return a valid user agent string', () => { - const userAgent = getUserAgent(); - expect(userAgent).toBeDefined(); - expect(typeof userAgent).toBe('string'); - }); + describe('getUserAgent', () => { + it('should return a valid user agent string', () => { + const userAgent = getUserAgent(); + expect(userAgent).toBeDefined(); + expect(typeof userAgent).toBe('string'); + }); - it('should format user agent as "{name}/{version}"', () => { - const userAgent = getUserAgent(); - expect(userAgent).toBe(`${PACKAGE_NAME}/${PACKAGE_VERSION}`); - }); + it('should format user 
agent as "{name}/{version}"', () => { + const userAgent = getUserAgent(); + expect(userAgent).toBe(`${PACKAGE_NAME}/${PACKAGE_VERSION}`); + }); - it('should return a non-empty string', () => { - const userAgent = getUserAgent(); - expect(userAgent.length).toBeGreaterThan(0); - }); + it('should return a non-empty string', () => { + const userAgent = getUserAgent(); + expect(userAgent.length).toBeGreaterThan(0); + }); - it('should contain the package name', () => { - const userAgent = getUserAgent(); - expect(userAgent).toContain('code-storage-sdk'); - }); + it('should contain the package name', () => { + const userAgent = getUserAgent(); + expect(userAgent).toContain('code-storage-sdk'); + }); - it('should contain the version number', () => { - const userAgent = getUserAgent(); - expect(userAgent).toContain(packageJson.version); - }); + it('should contain the version number', () => { + const userAgent = getUserAgent(); + expect(userAgent).toContain(packageJson.version); + }); - it('should return consistent value across multiple calls', () => { - const userAgent1 = getUserAgent(); - const userAgent2 = getUserAgent(); - expect(userAgent1).toBe(userAgent2); - }); + it('should return consistent value across multiple calls', () => { + const userAgent1 = getUserAgent(); + const userAgent2 = getUserAgent(); + expect(userAgent1).toBe(userAgent2); + }); - it('should match the expected format pattern', () => { - const userAgent = getUserAgent(); - // Pattern: name/version - const pattern = /^[\w-]+\/\d+\.\d+\.\d+(-[a-zA-Z0-9.-]+)?$/; - expect(userAgent).toMatch(pattern); - }); - }); + it('should match the expected format pattern', () => { + const userAgent = getUserAgent(); + // Pattern: name/version + const pattern = /^[\w-]+\/\d+\.\d+\.\d+(-[a-zA-Z0-9.-]+)?$/; + expect(userAgent).toMatch(pattern); + }); + }); }); diff --git a/packages/git-storage-sdk-node/tests/webhook.test.ts b/packages/git-storage-sdk-node/tests/webhook.test.ts index 0c8c19714..3c30a4e58 100644 --- 
a/packages/git-storage-sdk-node/tests/webhook.test.ts +++ b/packages/git-storage-sdk-node/tests/webhook.test.ts @@ -1,370 +1,434 @@ import { describe, expect, it } from 'vitest'; + import { - parseSignatureHeader, - type RawWebhookPushEvent, - validateWebhook, - validateWebhookSignature, - type WebhookPushEvent, + type RawWebhookPushEvent, + type WebhookPushEvent, + parseSignatureHeader, + validateWebhook, + validateWebhookSignature, } from '../src'; import { createHmac } from '../src/util'; describe('Webhook Validation', () => { - const secret = 'test_webhook_secret_key_123'; - const rawPayload: RawWebhookPushEvent = { - repository: { - id: 'repo_abc123def456ghi789jkl', - url: 'https://git.example.com/org/repo', - }, - ref: 'main', - before: 'abc123000000000000000000000000000000000', - after: 'def456000000000000000000000000000000000', - customer_id: 'cust_xyz789mno456pqr123st', - pushed_at: '2024-01-20T10:30:00Z', - }; - const expectedPushPayload: WebhookPushEvent = { - type: 'push', - repository: { - id: 'repo_abc123def456ghi789jkl', - url: 'https://git.example.com/org/repo', - }, - ref: 'main', - before: 'abc123000000000000000000000000000000000', - after: 'def456000000000000000000000000000000000', - customerId: 'cust_xyz789mno456pqr123st', - pushedAt: new Date('2024-01-20T10:30:00Z'), - rawPushedAt: '2024-01-20T10:30:00Z', - }; - const payloadStr = JSON.stringify(rawPayload); - - // Helper to generate a valid signature - async function generateSignature( - payloadData: string, - webhookSecret: string, - timestamp?: number, - ): Promise<{ header: string; timestamp: number }> { - const ts = timestamp ?? 
Math.floor(Date.now() / 1000); - const signedData = `${ts}.${payloadData}`; - const signature = await createHmac('sha256', webhookSecret, signedData); - return { - header: `t=${ts},sha256=${signature}`, - timestamp: ts, - }; - } - - describe('parseSignatureHeader', () => { - it('should parse valid signature header', () => { - const header = 't=1234567890,sha256=abcdef123456'; - const result = parseSignatureHeader(header); - expect(result).toEqual({ - timestamp: '1234567890', - signature: 'abcdef123456', - }); - }); - - it('should handle header with spaces', () => { - const header = 't=1234567890, sha256=abcdef123456'; - const result = parseSignatureHeader(header); - expect(result).toEqual({ - timestamp: '1234567890', - signature: 'abcdef123456', - }); - }); - - it('should return null for invalid header format', () => { - expect(parseSignatureHeader('')).toBeNull(); - expect(parseSignatureHeader('invalid')).toBeNull(); - expect(parseSignatureHeader('t=123')).toBeNull(); // Missing signature - expect(parseSignatureHeader('sha256=abc')).toBeNull(); // Missing timestamp - expect(parseSignatureHeader('timestamp=123,signature=abc')).toBeNull(); // Wrong keys - }); - - it('should handle header with extra fields', () => { - const header = 't=1234567890,sha256=abcdef123456,v1=ignored'; - const result = parseSignatureHeader(header); - expect(result).toEqual({ - timestamp: '1234567890', - signature: 'abcdef123456', - }); - }); - }); - - describe('validateWebhookSignature', () => { - it('should validate correct signature', async () => { - const { header, timestamp } = await generateSignature(payloadStr, secret); - const result = await validateWebhookSignature(payloadStr, header, secret); - - expect(result).toEqual({ - valid: true, - timestamp, - }); - }); - - it('should validate with Buffer payload', async () => { - const { header, timestamp } = await generateSignature(payloadStr, secret); - const payloadBuffer = Buffer.from(payloadStr, 'utf8'); - const result = await 
validateWebhookSignature(payloadBuffer, header, secret); - - expect(result).toEqual({ - valid: true, - timestamp, - }); - }); - - it('should reject invalid signature', async () => { - const { header } = await generateSignature(payloadStr, 'wrong_secret'); - const result = await validateWebhookSignature(payloadStr, header, secret); - - expect(result.valid).toBe(false); - expect(result.error).toBe('Invalid signature'); - }); - - it('should reject old timestamp (replay protection)', async () => { - const oldTimestamp = Math.floor(Date.now() / 1000) - 400; // 400 seconds ago - const { header } = await generateSignature(payloadStr, secret, oldTimestamp); - const result = await validateWebhookSignature(payloadStr, header, secret); - - expect(result.valid).toBe(false); - expect(result.error).toMatch(/Webhook timestamp too old/); - expect(result.timestamp).toBe(oldTimestamp); - }); - - it('should reject future timestamp', async () => { - const futureTimestamp = Math.floor(Date.now() / 1000) + 120; // 2 minutes in future - const { header } = await generateSignature(payloadStr, secret, futureTimestamp); - const result = await validateWebhookSignature(payloadStr, header, secret); - - expect(result.valid).toBe(false); - expect(result.error).toBe('Webhook timestamp is in the future'); - expect(result.timestamp).toBe(futureTimestamp); - }); - - it('should allow disabling timestamp validation', async () => { - const oldTimestamp = Math.floor(Date.now() / 1000) - 3600; // 1 hour ago - const { header } = await generateSignature(payloadStr, secret, oldTimestamp); - const result = await validateWebhookSignature(payloadStr, header, secret, { - maxAgeSeconds: 0, - }); - - expect(result).toEqual({ - valid: true, - timestamp: oldTimestamp, - }); - }); - - it('should use custom max age', async () => { - const timestamp = Math.floor(Date.now() / 1000) - 60; // 60 seconds ago - const { header } = await generateSignature(payloadStr, secret, timestamp); - - // Should fail with 30 second max 
age - const result1 = await validateWebhookSignature(payloadStr, header, secret, { - maxAgeSeconds: 30, - }); - expect(result1.valid).toBe(false); - - // Should succeed with 120 second max age - const result2 = await validateWebhookSignature(payloadStr, header, secret, { - maxAgeSeconds: 120, - }); - expect(result2.valid).toBe(true); - }); - - it('should reject malformed signature header', async () => { - const result = await validateWebhookSignature(payloadStr, 'invalid_header', secret); - expect(result).toEqual({ - valid: false, - error: 'Invalid signature header format', - }); - }); - - it('should reject non-numeric timestamp', async () => { - const header = 't=not_a_number,sha256=abcdef123456'; - const result = await validateWebhookSignature(payloadStr, header, secret); - expect(result).toEqual({ - valid: false, - error: 'Invalid timestamp in signature', - }); - }); - - it('should handle different payload modifications', async () => { - const { header } = await generateSignature(payloadStr, secret); - - // Modified payload should fail - const modifiedPayload = payloadStr.replace('main', 'master'); - const result1 = await validateWebhookSignature(modifiedPayload, header, secret); - expect(result1.valid).toBe(false); - - // Extra whitespace should fail - const result2 = await validateWebhookSignature(payloadStr + ' ', header, secret); - expect(result2.valid).toBe(false); - - // Different encoding should work if content is same - const payloadBuffer = Buffer.from(payloadStr); - const result3 = await validateWebhookSignature(payloadBuffer, header, secret); - expect(result3.valid).toBe(true); - }); - }); - - describe('validateWebhook', () => { - it('should validate and parse webhook', async () => { - const { header, timestamp } = await generateSignature(payloadStr, secret); - const headers = { - 'x-pierre-signature': header, - 'x-pierre-event': 'push', - }; - - const result = await validateWebhook(payloadStr, headers, secret); - - expect(result.valid).toBe(true); - 
expect(result.eventType).toBe('push'); - expect(result.timestamp).toBe(timestamp); - expect(result.payload).toEqual(expectedPushPayload); - }); - - it('should handle uppercase headers', async () => { - const { header, timestamp } = await generateSignature(payloadStr, secret); - const headers = { - 'X-Pierre-Signature': header, - 'X-Pierre-Event': 'push', - }; - - const result = await validateWebhook(payloadStr, headers, secret); - - expect(result.valid).toBe(true); - expect(result.eventType).toBe('push'); - expect(result.timestamp).toBe(timestamp); - }); - - it('should reject missing signature header', async () => { - const headers = { - 'x-pierre-event': 'push', - }; - - const result = await validateWebhook(payloadStr, headers, secret); - - expect(result).toEqual({ - valid: false, - error: 'Missing or invalid X-Pierre-Signature header', - }); - }); - - it('should reject missing event header', async () => { - const { header } = await generateSignature(payloadStr, secret); - const headers = { - 'x-pierre-signature': header, - }; - - const result = await validateWebhook(payloadStr, headers, secret); - - expect(result).toEqual({ - valid: false, - error: 'Missing or invalid X-Pierre-Event header', - }); - }); - - it('should reject array headers', async () => { - const { header } = await generateSignature(payloadStr, secret); - - const headers1 = { - 'x-pierre-signature': [header, header], - 'x-pierre-event': 'push', - }; - const result1 = await validateWebhook(payloadStr, headers1, secret); - expect(result1.valid).toBe(false); - - const headers2 = { - 'x-pierre-signature': header, - 'x-pierre-event': ['push', 'push'], - }; - const result2 = await validateWebhook(payloadStr, headers2, secret); - expect(result2.valid).toBe(false); - }); - - it('should reject invalid JSON payload', async () => { - const invalidJson = 'not valid json'; - const { header } = await generateSignature(invalidJson, secret); - const headers = { - 'x-pierre-signature': header, - 'x-pierre-event': 
'push', - }; - - const result = await validateWebhook(invalidJson, headers, secret); - - expect(result.valid).toBe(false); - expect(result.error).toBe('Invalid JSON payload'); - }); - - it('should propagate signature validation errors', async () => { - const { header } = await generateSignature(payloadStr, 'wrong_secret'); - const headers = { - 'x-pierre-signature': header, - 'x-pierre-event': 'push', - }; - - const result = await validateWebhook(payloadStr, headers, secret); - - expect(result.valid).toBe(false); - expect(result.error).toBe('Invalid signature'); - }); - - it('should pass through validation options', async () => { - const oldTimestamp = Math.floor(Date.now() / 1000) - 3600; // 1 hour ago - const { header } = await generateSignature(payloadStr, secret, oldTimestamp); - const headers = { - 'x-pierre-signature': header, - 'x-pierre-event': 'push', - }; - - // Should fail with default max age - const result1 = await validateWebhook(payloadStr, headers, secret); - expect(result1.valid).toBe(false); - - // Should succeed with disabled timestamp validation - const result2 = await validateWebhook(payloadStr, headers, secret, { maxAgeSeconds: 0 }); - expect(result2.valid).toBe(true); - expect(result2.payload).toEqual(expectedPushPayload); - }); - }); - - describe('Security considerations', () => { - it('should use constant-time comparison', async () => { - // This test verifies the implementation uses timingSafeEqual - // by ensuring different length signatures are rejected before comparison - const { header } = await generateSignature(payloadStr, secret); - const shortSigHeader = header.replace(/sha256=.*/, 'sha256=short'); - const result = await validateWebhookSignature(payloadStr, shortSigHeader, secret); - - expect(result.valid).toBe(false); - expect(result.error).toBe('Invalid signature'); - }); - - it('should handle empty or undefined inputs safely', async () => { - const { header } = await generateSignature(payloadStr, secret); - - // Empty payload - 
expect((await validateWebhookSignature('', header, secret)).valid).toBe(false); - - // Empty secret - expect((await validateWebhookSignature(payloadStr, header, '')).valid).toBe(false); - - // Empty header - expect((await validateWebhookSignature(payloadStr, '', secret)).valid).toBe(false); - }); - - it('should be resilient to timing attacks', async () => { - // Generate multiple signatures to test timing consistency - const signatures: string[] = []; - for (let i = 0; i < 10; i++) { - const testSecret = `secret_${i}`; - const { header } = await generateSignature(payloadStr, testSecret); - signatures.push(header); - } - - // All invalid signatures should be rejected - // The implementation should use constant-time comparison - for (const sig of signatures) { - const result = await validateWebhookSignature(payloadStr, sig, secret); - expect(result.valid).toBe(false); - } - }); - }); + const secret = 'test_webhook_secret_key_123'; + const rawPayload: RawWebhookPushEvent = { + repository: { + id: 'repo_abc123def456ghi789jkl', + url: 'https://git.example.com/org/repo', + }, + ref: 'main', + before: 'abc123000000000000000000000000000000000', + after: 'def456000000000000000000000000000000000', + customer_id: 'cust_xyz789mno456pqr123st', + pushed_at: '2024-01-20T10:30:00Z', + }; + const expectedPushPayload: WebhookPushEvent = { + type: 'push', + repository: { + id: 'repo_abc123def456ghi789jkl', + url: 'https://git.example.com/org/repo', + }, + ref: 'main', + before: 'abc123000000000000000000000000000000000', + after: 'def456000000000000000000000000000000000', + customerId: 'cust_xyz789mno456pqr123st', + pushedAt: new Date('2024-01-20T10:30:00Z'), + rawPushedAt: '2024-01-20T10:30:00Z', + }; + const payloadStr = JSON.stringify(rawPayload); + + // Helper to generate a valid signature + async function generateSignature( + payloadData: string, + webhookSecret: string, + timestamp?: number + ): Promise<{ header: string; timestamp: number }> { + const ts = timestamp ?? 
Math.floor(Date.now() / 1000); + const signedData = `${ts}.${payloadData}`; + const signature = await createHmac('sha256', webhookSecret, signedData); + return { + header: `t=${ts},sha256=${signature}`, + timestamp: ts, + }; + } + + describe('parseSignatureHeader', () => { + it('should parse valid signature header', () => { + const header = 't=1234567890,sha256=abcdef123456'; + const result = parseSignatureHeader(header); + expect(result).toEqual({ + timestamp: '1234567890', + signature: 'abcdef123456', + }); + }); + + it('should handle header with spaces', () => { + const header = 't=1234567890, sha256=abcdef123456'; + const result = parseSignatureHeader(header); + expect(result).toEqual({ + timestamp: '1234567890', + signature: 'abcdef123456', + }); + }); + + it('should return null for invalid header format', () => { + expect(parseSignatureHeader('')).toBeNull(); + expect(parseSignatureHeader('invalid')).toBeNull(); + expect(parseSignatureHeader('t=123')).toBeNull(); // Missing signature + expect(parseSignatureHeader('sha256=abc')).toBeNull(); // Missing timestamp + expect(parseSignatureHeader('timestamp=123,signature=abc')).toBeNull(); // Wrong keys + }); + + it('should handle header with extra fields', () => { + const header = 't=1234567890,sha256=abcdef123456,v1=ignored'; + const result = parseSignatureHeader(header); + expect(result).toEqual({ + timestamp: '1234567890', + signature: 'abcdef123456', + }); + }); + }); + + describe('validateWebhookSignature', () => { + it('should validate correct signature', async () => { + const { header, timestamp } = await generateSignature(payloadStr, secret); + const result = await validateWebhookSignature(payloadStr, header, secret); + + expect(result).toEqual({ + valid: true, + timestamp, + }); + }); + + it('should validate with Buffer payload', async () => { + const { header, timestamp } = await generateSignature(payloadStr, secret); + const payloadBuffer = Buffer.from(payloadStr, 'utf8'); + const result = await 
validateWebhookSignature( + payloadBuffer, + header, + secret + ); + + expect(result).toEqual({ + valid: true, + timestamp, + }); + }); + + it('should reject invalid signature', async () => { + const { header } = await generateSignature(payloadStr, 'wrong_secret'); + const result = await validateWebhookSignature(payloadStr, header, secret); + + expect(result.valid).toBe(false); + expect(result.error).toBe('Invalid signature'); + }); + + it('should reject old timestamp (replay protection)', async () => { + const oldTimestamp = Math.floor(Date.now() / 1000) - 400; // 400 seconds ago + const { header } = await generateSignature( + payloadStr, + secret, + oldTimestamp + ); + const result = await validateWebhookSignature(payloadStr, header, secret); + + expect(result.valid).toBe(false); + expect(result.error).toMatch(/Webhook timestamp too old/); + expect(result.timestamp).toBe(oldTimestamp); + }); + + it('should reject future timestamp', async () => { + const futureTimestamp = Math.floor(Date.now() / 1000) + 120; // 2 minutes in future + const { header } = await generateSignature( + payloadStr, + secret, + futureTimestamp + ); + const result = await validateWebhookSignature(payloadStr, header, secret); + + expect(result.valid).toBe(false); + expect(result.error).toBe('Webhook timestamp is in the future'); + expect(result.timestamp).toBe(futureTimestamp); + }); + + it('should allow disabling timestamp validation', async () => { + const oldTimestamp = Math.floor(Date.now() / 1000) - 3600; // 1 hour ago + const { header } = await generateSignature( + payloadStr, + secret, + oldTimestamp + ); + const result = await validateWebhookSignature( + payloadStr, + header, + secret, + { + maxAgeSeconds: 0, + } + ); + + expect(result).toEqual({ + valid: true, + timestamp: oldTimestamp, + }); + }); + + it('should use custom max age', async () => { + const timestamp = Math.floor(Date.now() / 1000) - 60; // 60 seconds ago + const { header } = await generateSignature(payloadStr, secret, 
timestamp); + + // Should fail with 30 second max age + const result1 = await validateWebhookSignature( + payloadStr, + header, + secret, + { + maxAgeSeconds: 30, + } + ); + expect(result1.valid).toBe(false); + + // Should succeed with 120 second max age + const result2 = await validateWebhookSignature( + payloadStr, + header, + secret, + { + maxAgeSeconds: 120, + } + ); + expect(result2.valid).toBe(true); + }); + + it('should reject malformed signature header', async () => { + const result = await validateWebhookSignature( + payloadStr, + 'invalid_header', + secret + ); + expect(result).toEqual({ + valid: false, + error: 'Invalid signature header format', + }); + }); + + it('should reject non-numeric timestamp', async () => { + const header = 't=not_a_number,sha256=abcdef123456'; + const result = await validateWebhookSignature(payloadStr, header, secret); + expect(result).toEqual({ + valid: false, + error: 'Invalid timestamp in signature', + }); + }); + + it('should handle different payload modifications', async () => { + const { header } = await generateSignature(payloadStr, secret); + + // Modified payload should fail + const modifiedPayload = payloadStr.replace('main', 'master'); + const result1 = await validateWebhookSignature( + modifiedPayload, + header, + secret + ); + expect(result1.valid).toBe(false); + + // Extra whitespace should fail + const result2 = await validateWebhookSignature( + payloadStr + ' ', + header, + secret + ); + expect(result2.valid).toBe(false); + + // Different encoding should work if content is same + const payloadBuffer = Buffer.from(payloadStr); + const result3 = await validateWebhookSignature( + payloadBuffer, + header, + secret + ); + expect(result3.valid).toBe(true); + }); + }); + + describe('validateWebhook', () => { + it('should validate and parse webhook', async () => { + const { header, timestamp } = await generateSignature(payloadStr, secret); + const headers = { + 'x-pierre-signature': header, + 'x-pierre-event': 'push', + 
}; + + const result = await validateWebhook(payloadStr, headers, secret); + + expect(result.valid).toBe(true); + expect(result.eventType).toBe('push'); + expect(result.timestamp).toBe(timestamp); + expect(result.payload).toEqual(expectedPushPayload); + }); + + it('should handle uppercase headers', async () => { + const { header, timestamp } = await generateSignature(payloadStr, secret); + const headers = { + 'X-Pierre-Signature': header, + 'X-Pierre-Event': 'push', + }; + + const result = await validateWebhook(payloadStr, headers, secret); + + expect(result.valid).toBe(true); + expect(result.eventType).toBe('push'); + expect(result.timestamp).toBe(timestamp); + }); + + it('should reject missing signature header', async () => { + const headers = { + 'x-pierre-event': 'push', + }; + + const result = await validateWebhook(payloadStr, headers, secret); + + expect(result).toEqual({ + valid: false, + error: 'Missing or invalid X-Pierre-Signature header', + }); + }); + + it('should reject missing event header', async () => { + const { header } = await generateSignature(payloadStr, secret); + const headers = { + 'x-pierre-signature': header, + }; + + const result = await validateWebhook(payloadStr, headers, secret); + + expect(result).toEqual({ + valid: false, + error: 'Missing or invalid X-Pierre-Event header', + }); + }); + + it('should reject array headers', async () => { + const { header } = await generateSignature(payloadStr, secret); + + const headers1 = { + 'x-pierre-signature': [header, header], + 'x-pierre-event': 'push', + }; + const result1 = await validateWebhook(payloadStr, headers1, secret); + expect(result1.valid).toBe(false); + + const headers2 = { + 'x-pierre-signature': header, + 'x-pierre-event': ['push', 'push'], + }; + const result2 = await validateWebhook(payloadStr, headers2, secret); + expect(result2.valid).toBe(false); + }); + + it('should reject invalid JSON payload', async () => { + const invalidJson = 'not valid json'; + const { header } = await 
generateSignature(invalidJson, secret); + const headers = { + 'x-pierre-signature': header, + 'x-pierre-event': 'push', + }; + + const result = await validateWebhook(invalidJson, headers, secret); + + expect(result.valid).toBe(false); + expect(result.error).toBe('Invalid JSON payload'); + }); + + it('should propagate signature validation errors', async () => { + const { header } = await generateSignature(payloadStr, 'wrong_secret'); + const headers = { + 'x-pierre-signature': header, + 'x-pierre-event': 'push', + }; + + const result = await validateWebhook(payloadStr, headers, secret); + + expect(result.valid).toBe(false); + expect(result.error).toBe('Invalid signature'); + }); + + it('should pass through validation options', async () => { + const oldTimestamp = Math.floor(Date.now() / 1000) - 3600; // 1 hour ago + const { header } = await generateSignature( + payloadStr, + secret, + oldTimestamp + ); + const headers = { + 'x-pierre-signature': header, + 'x-pierre-event': 'push', + }; + + // Should fail with default max age + const result1 = await validateWebhook(payloadStr, headers, secret); + expect(result1.valid).toBe(false); + + // Should succeed with disabled timestamp validation + const result2 = await validateWebhook(payloadStr, headers, secret, { + maxAgeSeconds: 0, + }); + expect(result2.valid).toBe(true); + expect(result2.payload).toEqual(expectedPushPayload); + }); + }); + + describe('Security considerations', () => { + it('should use constant-time comparison', async () => { + // This test verifies the implementation uses timingSafeEqual + // by ensuring different length signatures are rejected before comparison + const { header } = await generateSignature(payloadStr, secret); + const shortSigHeader = header.replace(/sha256=.*/, 'sha256=short'); + const result = await validateWebhookSignature( + payloadStr, + shortSigHeader, + secret + ); + + expect(result.valid).toBe(false); + expect(result.error).toBe('Invalid signature'); + }); + + it('should handle 
empty or undefined inputs safely', async () => { + const { header } = await generateSignature(payloadStr, secret); + + // Empty payload + expect((await validateWebhookSignature('', header, secret)).valid).toBe( + false + ); + + // Empty secret + expect( + (await validateWebhookSignature(payloadStr, header, '')).valid + ).toBe(false); + + // Empty header + expect( + (await validateWebhookSignature(payloadStr, '', secret)).valid + ).toBe(false); + }); + + it('should be resilient to timing attacks', async () => { + // Generate multiple signatures to test timing consistency + const signatures: string[] = []; + for (let i = 0; i < 10; i++) { + const testSecret = `secret_${i}`; + const { header } = await generateSignature(payloadStr, testSecret); + signatures.push(header); + } + + // All invalid signatures should be rejected + // The implementation should use constant-time comparison + for (const sig of signatures) { + const result = await validateWebhookSignature(payloadStr, sig, secret); + expect(result.valid).toBe(false); + } + }); + }); }); diff --git a/packages/git-storage-sdk-node/tsconfig.json b/packages/git-storage-sdk-node/tsconfig.json index 1116724a5..d95b7ff86 100644 --- a/packages/git-storage-sdk-node/tsconfig.json +++ b/packages/git-storage-sdk-node/tsconfig.json @@ -1,9 +1,9 @@ { - "extends": "../../tsconfig.options.json", - "include": ["src/**/*", "tsup.config.ts", "package.json"], - "references": [], - "compilerOptions": { - "outDir": "../../.moon/cache/types/packages/git-storage-sdk", - "tsBuildInfoFile": "../../.moon/cache/types/packages/git-storage-sdk/.tsbuildinfo" - } + "extends": "../../tsconfig.options.json", + "include": ["src/**/*", "tsup.config.ts", "package.json"], + "references": [], + "compilerOptions": { + "outDir": "../../.moon/cache/types/packages/git-storage-sdk", + "tsBuildInfoFile": "../../.moon/cache/types/packages/git-storage-sdk/.tsbuildinfo" + } } diff --git a/packages/git-storage-sdk-node/tsconfig.tsup.json 
b/packages/git-storage-sdk-node/tsconfig.tsup.json index fa4cd2a9b..fea75b8c0 100644 --- a/packages/git-storage-sdk-node/tsconfig.tsup.json +++ b/packages/git-storage-sdk-node/tsconfig.tsup.json @@ -1,13 +1,13 @@ { - "extends": "../../tsconfig.options.json", - "include": ["src/**/*"], - "compilerOptions": { - "composite": false, - "incremental": false, - "declaration": true, - "emitDeclarationOnly": true, - "declarationMap": true, - "outDir": "./dist", - "tsBuildInfoFile": "./dist/.tsbuildinfo" - } + "extends": "../../tsconfig.options.json", + "include": ["src/**/*"], + "compilerOptions": { + "composite": false, + "incremental": false, + "declaration": true, + "emitDeclarationOnly": true, + "declarationMap": true, + "outDir": "./dist", + "tsBuildInfoFile": "./dist/.tsbuildinfo" + } } diff --git a/packages/git-storage-sdk-node/tsup.config.ts b/packages/git-storage-sdk-node/tsup.config.ts index 4adefdf5c..3f0e8a1ce 100644 --- a/packages/git-storage-sdk-node/tsup.config.ts +++ b/packages/git-storage-sdk-node/tsup.config.ts @@ -1,21 +1,21 @@ import { defineConfig } from 'tsup'; export default defineConfig({ - entry: ['src/index.ts'], - format: ['cjs', 'esm'], - dts: true, - clean: true, - sourcemap: true, - minify: false, - splitting: false, - treeshake: true, - external: ['node:crypto', 'crypto'], - tsconfig: 'tsconfig.tsup.json', - esbuildOptions(options) { - // Always define the URLs at build time - options.define = { - __API_BASE_URL__: JSON.stringify('https://api.{{org}}.code.storage'), - __STORAGE_BASE_URL__: JSON.stringify('{{org}}.code.storage'), - }; - }, + entry: ['src/index.ts'], + format: ['cjs', 'esm'], + dts: true, + clean: true, + sourcemap: true, + minify: false, + splitting: false, + treeshake: true, + external: ['node:crypto', 'crypto'], + tsconfig: 'tsconfig.tsup.json', + esbuildOptions(options) { + // Always define the URLs at build time + options.define = { + __API_BASE_URL__: JSON.stringify('https://api.{{org}}.code.storage'), + 
__STORAGE_BASE_URL__: JSON.stringify('{{org}}.code.storage'), + }; + }, }); diff --git a/packages/git-storage-sdk-node/vitest.config.ts b/packages/git-storage-sdk-node/vitest.config.ts index fda74e8ec..cf6e0fa33 100644 --- a/packages/git-storage-sdk-node/vitest.config.ts +++ b/packages/git-storage-sdk-node/vitest.config.ts @@ -1,11 +1,11 @@ import { defineConfig } from 'vitest/config'; export default defineConfig({ - test: { - restoreMocks: true, - }, - define: { - __API_BASE_URL__: JSON.stringify('https://api.{{org}}.3p.pierre.rip'), - __STORAGE_BASE_URL__: JSON.stringify('{{org}}.3p.pierre.rip'), - }, + test: { + restoreMocks: true, + }, + define: { + __API_BASE_URL__: JSON.stringify('https://api.{{org}}.3p.pierre.rip'), + __STORAGE_BASE_URL__: JSON.stringify('{{org}}.3p.pierre.rip'), + }, }); diff --git a/packages/git-storage-sdk-python/DEVELOPMENT.md b/packages/git-storage-sdk-python/DEVELOPMENT.md index ba06cfcc3..4c3394072 100644 --- a/packages/git-storage-sdk-python/DEVELOPMENT.md +++ b/packages/git-storage-sdk-python/DEVELOPMENT.md @@ -1,6 +1,7 @@ # Development Guide -This document provides technical details for developers working on the Pierre Storage Python SDK. +This document provides technical details for developers working on the Pierre +Storage Python SDK. ## Architecture @@ -10,7 +11,8 @@ The SDK is organized into the following modules: - **`client.py`**: Main `GitStorage` class for creating/finding repositories - **`repo.py`**: `RepoImpl` class implementing all repository operations -- **`commit.py`**: `CommitBuilderImpl` for creating commits with streaming support +- **`commit.py`**: `CommitBuilderImpl` for creating commits with streaming + support - **`auth.py`**: JWT token generation and signing - **`errors.py`**: Custom exception classes - **`types.py`**: Type definitions using TypedDict and Enums @@ -18,8 +20,10 @@ The SDK is organized into the following modules: ### Design Patterns -1. 
**Protocol-based interfaces**: Uses `Protocol` classes for type checking without inheritance -2. **Fluent builder**: `CommitBuilder` provides chainable methods for composing commits +1. **Protocol-based interfaces**: Uses `Protocol` classes for type checking + without inheritance +2. **Fluent builder**: `CommitBuilder` provides chainable methods for composing + commits 3. **Async/await**: All I/O operations are async for better performance 4. **Streaming**: Large files are streamed in 4MB chunks to avoid memory issues @@ -28,6 +32,7 @@ The SDK is organized into the following modules: ### Authentication (`auth.py`) JWT generation with automatic algorithm detection: + - ES256 for elliptic curve keys (most common) - RS256 for RSA keys - EdDSA for Ed25519/Ed448 keys @@ -37,6 +42,7 @@ Uses `cryptography` library for key loading and PyJWT for signing. ### Commit Builder (`commit.py`) Key features: + - Fluent API for building commits - Streaming support for large files - Chunking into 4MB segments @@ -46,6 +52,7 @@ Key features: ### Repository Operations (`repo.py`) Implements all Git storage API endpoints: + - File operations (get, list) - Branch and commit listing with pagination - Diff operations (branch, commit) @@ -56,6 +63,7 @@ Implements all Git storage API endpoints: ### Type System (`types.py`) Uses TypedDict for better IDE support and runtime type checking: + - All API options are typed - Results are structured with TypedDict - Enums for constants (DiffFileState, GitFileMode) diff --git a/packages/git-storage-sdk-python/PROJECT_SUMMARY.md b/packages/git-storage-sdk-python/PROJECT_SUMMARY.md index c915ccaf0..f5a1a1dbd 100644 --- a/packages/git-storage-sdk-python/PROJECT_SUMMARY.md +++ b/packages/git-storage-sdk-python/PROJECT_SUMMARY.md @@ -2,9 +2,11 @@ ## ✅ Completed - Version 0.4.2 -A fully-functional, production-ready Python SDK for Pierre Git Storage, mirroring the TypeScript SDK functionality. 
+A fully-functional, production-ready Python SDK for Pierre Git Storage, +mirroring the TypeScript SDK functionality. ### Latest Updates (v0.4.2) + - ✅ One-shot diff commits via `create_commit_from_diff` - ✅ Shared commit option normalization across builder/diff flows - ✅ Expanded SDK documentation and quick-start examples @@ -54,6 +56,7 @@ packages/git-storage-sdk-python/ ## 🎯 Key Features ### Core Functionality + - ✅ Repository creation and management - ✅ JWT-based authentication (ES256, RS256, EdDSA) - ✅ Public JWT helper for manual token generation @@ -65,6 +68,7 @@ packages/git-storage-sdk-python/ - ✅ Webhook signature validation ### Developer Experience + - ✅ Full type hints throughout - ✅ Async/await API - ✅ Fluent commit builder API @@ -91,6 +95,7 @@ moon run git-storage-sdk-python:test ``` ### Test Results + ``` 26 passed in 0.11s ✅ Coverage: 45% overall @@ -102,6 +107,7 @@ Coverage: 45% overall ## 📦 Dependencies ### Required (Runtime) + - `httpx` - Async HTTP client with streaming - `pyjwt` - JWT encoding/decoding - `cryptography` - Key management @@ -109,6 +115,7 @@ Coverage: 45% overall - `typing-extensions` - Type hint backports (Python 3.8-3.9) ### Development + - `pytest` - Test framework - `pytest-asyncio` - Async test support - `pytest-cov` - Coverage reporting @@ -237,21 +244,22 @@ Potential improvements (not required for v0.1.2): ## 🎓 Comparison with TypeScript SDK -| Feature | TypeScript | Python | Status | -|---------|-----------|--------|--------| -| Repository operations | ✅ | ✅ | Complete | -| JWT authentication | ✅ | ✅ | Complete | -| Commit builder | ✅ | ✅ | Complete | -| File streaming | ✅ | ✅ | Complete | -| Webhook validation | ✅ | ✅ | Complete | -| Error handling | ✅ | ✅ | Complete | -| Type definitions | ✅ | ✅ | Complete | -| Documentation | ✅ | ✅ | Complete | -| Unit tests | ✅ | ✅ | Complete | +| Feature | TypeScript | Python | Status | +| --------------------- | ---------- | ------ | -------- | +| Repository operations | ✅ | ✅ | 
Complete | +| JWT authentication | ✅ | ✅ | Complete | +| Commit builder | ✅ | ✅ | Complete | +| File streaming | ✅ | ✅ | Complete | +| Webhook validation | ✅ | ✅ | Complete | +| Error handling | ✅ | ✅ | Complete | +| Type definitions | ✅ | ✅ | Complete | +| Documentation | ✅ | ✅ | Complete | +| Unit tests | ✅ | ✅ | Complete | ## ✨ Ready for Use The Python SDK is **production-ready** and can be: + - ✅ Published to PyPI - ✅ Used in production applications - ✅ Integrated into existing Python projects diff --git a/packages/git-storage-sdk-python/PUBLISHING.md b/packages/git-storage-sdk-python/PUBLISHING.md index 55773b65f..8db754d46 100644 --- a/packages/git-storage-sdk-python/PUBLISHING.md +++ b/packages/git-storage-sdk-python/PUBLISHING.md @@ -1,6 +1,7 @@ # Publishing to PyPI - Complete Guide -This guide walks you through publishing the `pierre-storage` package to PyPI for the first time. +This guide walks you through publishing the `pierre-storage` package to PyPI for +the first time. ## Prerequisites @@ -33,6 +34,7 @@ Do the same for TestPyPI if you want (optional but recommended). Instead of using passwords, we'll use API tokens (more secure): #### For TestPyPI (testing): + 1. Go to https://test.pypi.org/manage/account/token/ 2. Click "Add API token" 3. Token name: `pierre-storage-test` @@ -41,6 +43,7 @@ Instead of using passwords, we'll use API tokens (more secure): 6. **Save it immediately** - you won't see it again! #### For PyPI (production): + 1. Go to https://pypi.org/manage/account/token/ 2. Click "Add API token" 3. Token name: `pierre-storage` @@ -100,6 +103,7 @@ uv build ``` This creates two files in `dist/`: + - `pierre_storage-0.4.2-py3-none-any.whl` (wheel - preferred format) - `pierre-storage-0.4.2.tar.gz` (source distribution) @@ -129,9 +133,11 @@ uv run twine upload --repository testpypi dist/* # Enter your password: [paste your TestPyPI token starting with pypi-...] 
``` -> **Important**: Username is literally `__token__` (with two underscores), not your username! +> **Important**: Username is literally `__token__` (with two underscores), not +> your username! If successful, you'll see: + ``` Uploading pierre_storage-0.4.2-py3-none-any.whl Uploading pierre-storage-0.4.2.tar.gz @@ -156,7 +162,8 @@ deactivate rm -rf test-env ``` -> **Note**: We use `--extra-index-url` because dependencies (httpx, pyjwt, etc.) are on the real PyPI, not TestPyPI. +> **Note**: We use `--extra-index-url` because dependencies (httpx, pyjwt, etc.) +> are on the real PyPI, not TestPyPI. ### Step 7: Upload to Real PyPI 🚀 @@ -177,6 +184,7 @@ uv run twine upload dist/* Success! 🎉 You'll see: + ``` Uploading pierre_storage-0.4.2-py3-none-any.whl Uploading pierre-storage-0.4.2.tar.gz @@ -231,6 +239,7 @@ password = pypi-YOUR-TEST-TOKEN-HERE ``` **Secure the file:** + ```bash chmod 600 ~/.pypirc ``` @@ -298,6 +307,7 @@ You can't re-upload the same version. You must increment the version number. ### Error: "Invalid username or password" Common mistakes: + - Username should be `__token__` (with two underscores), not your PyPI username - Password should be the full token starting with `pypi-` - Make sure you're using the right token (TestPyPI vs PyPI) @@ -307,6 +317,7 @@ Common mistakes: You don't have permission to upload to that package name. **Solutions**: + - If it's your first upload, this shouldn't happen - If someone else owns the name, you need to choose a different name - Make sure you're logged in to the right account @@ -318,6 +329,7 @@ Wait a few minutes - PyPI can take 5-15 minutes to index new packages. 
### Import error after installation Make sure: + - Your package structure is correct - `__init__.py` exports the right things - You're testing in a fresh virtual environment @@ -346,15 +358,18 @@ twine upload dist/* ## Scoped Tokens (After First Upload) -After your first successful upload, create project-scoped tokens for better security: +After your first successful upload, create project-scoped tokens for better +security: ### For PyPI: + 1. Go to https://pypi.org/manage/project/pierre-storage/settings/ 2. Scroll to "API tokens" 3. Create new token with scope: "Project: pierre-storage" 4. Update your `~/.pypirc` with the new token ### For TestPyPI: + Do the same at https://test.pypi.org/manage/project/pierre-storage/settings/ ## Quick Reference @@ -375,6 +390,7 @@ twine upload dist/* # Then production ## Next Steps After Publishing 1. **Add PyPI badge to README**: + ```markdown [![PyPI version](https://badge.fury.io/py/pierre-storage.svg)](https://badge.fury.io/py/pierre-storage) ``` diff --git a/packages/git-storage-sdk-python/QUICKSTART.md b/packages/git-storage-sdk-python/QUICKSTART.md index abdd3a071..63631c3b7 100644 --- a/packages/git-storage-sdk-python/QUICKSTART.md +++ b/packages/git-storage-sdk-python/QUICKSTART.md @@ -108,7 +108,8 @@ asyncio.run(make_changes()) ### Applying a Diff Directly -If you already have a unified diff (for example, generated by `git diff`), you can apply it without building file operations manually: +If you already have a unified diff (for example, generated by `git diff`), you +can apply it without building file operations manually: ```python async def apply_diff(): @@ -177,6 +178,7 @@ print(f"Git URL: {git_url}") ``` This is useful when you need: + - Custom token expiration times - Specific permission scopes - Tokens for external tools @@ -220,7 +222,8 @@ except RefUpdateError as e: ## Next Steps - Read the [full documentation](README.md) for detailed API reference -- Check out [webhook validation](README.md#webhook-validation) 
for integrating with events +- Check out [webhook validation](README.md#webhook-validation) for integrating + with events - See [examples/](examples/) for more complex use cases ## Need Help? diff --git a/packages/git-storage-sdk-python/README.md b/packages/git-storage-sdk-python/README.md index 7b17251d9..3c4e3f43e 100644 --- a/packages/git-storage-sdk-python/README.md +++ b/packages/git-storage-sdk-python/README.md @@ -208,7 +208,8 @@ print(commit_diff["files"]) ### Creating Commits -The SDK provides a fluent builder API for creating commits with streaming support: +The SDK provides a fluent builder API for creating commits with streaming +support: ```python # Create a commit @@ -232,7 +233,8 @@ print(result["ref_update"]["old_sha"]) # All zeroes when ref is created The builder exposes: - `add_file(path, source, *, mode=None)` - Attach bytes from various sources -- `add_file_from_string(path, contents, encoding="utf-8", *, mode=None)` - Add text files (defaults to UTF-8) +- `add_file_from_string(path, contents, encoding="utf-8", *, mode=None)` - Add + text files (defaults to UTF-8) - `delete_path(path)` - Remove files or folders - `send()` - Finalize the commit and receive metadata @@ -253,22 +255,29 @@ The builder exposes: } ``` -If the backend reports a failure, the builder raises a `RefUpdateError` containing the status, reason, and ref details. +If the backend reports a failure, the builder raises a `RefUpdateError` +containing the status, reason, and ref details. 
**Options:** - `target_branch` (required): Branch name (without `refs/heads/` prefix) -- `expected_head_sha` (optional): Branch or commit that must match the remote tip -- `base_branch` (optional): Name of the branch to use as the base when creating a new branch (without `refs/heads/` prefix) -- `ephemeral` (optional): Mark the target branch as ephemeral (stored in separate namespace) -- `ephemeral_base` (optional): Indicates the base branch is ephemeral (requires `base_branch`) +- `expected_head_sha` (optional): Branch or commit that must match the remote + tip +- `base_branch` (optional): Name of the branch to use as the base when creating + a new branch (without `refs/heads/` prefix) +- `ephemeral` (optional): Mark the target branch as ephemeral (stored in + separate namespace) +- `ephemeral_base` (optional): Indicates the base branch is ephemeral (requires + `base_branch`) - `commit_message` (required): The commit message - `author` (required): Dictionary with `name` and `email` -- `committer` (optional): Dictionary with `name` and `email` (defaults to author) +- `committer` (optional): Dictionary with `name` and `email` (defaults to + author) ### Creating Commits from Diff Streams -When you already have a unified diff, you can let the SDK apply it directly without building individual file operations: +When you already have a unified diff, you can let the SDK apply it directly +without building individual file operations: ```python diff_text = """\ @@ -291,13 +300,21 @@ result = await repo.create_commit_from_diff( print(result["commit_sha"]) ``` -`diff` accepts the same source types as the commit builder (string, bytes, async iterator, etc.). The helper automatically streams the diff to the `/diff-commit` endpoint and returns a `CommitResult`. On conflicts or validation errors, it raises `RefUpdateError` with the server-provided status and message. +`diff` accepts the same source types as the commit builder (string, bytes, async +iterator, etc.). 
The helper automatically streams the diff to the `/diff-commit` +endpoint and returns a `CommitResult`. On conflicts or validation errors, it +raises `RefUpdateError` with the server-provided status and message. -You can provide the same metadata options as `create_commit`, including `expected_head_sha`, `base_branch`, `ephemeral`, `ephemeral_base`, and `committer`. +You can provide the same metadata options as `create_commit`, including +`expected_head_sha`, `base_branch`, `ephemeral`, `ephemeral_base`, and +`committer`. -> Files are chunked into 4 MiB segments, allowing streaming of large assets without buffering in memory. +> Files are chunked into 4 MiB segments, allowing streaming of large assets +> without buffering in memory. -> The `target_branch` must already exist on the remote repository. To seed an empty repository, omit `expected_head_sha`; the service will create the first commit only when no refs are present. +> The `target_branch` must already exist on the remote repository. To seed an +> empty repository, omit `expected_head_sha`; the service will create the first +> commit only when no refs are present. **Branching Example:** @@ -317,7 +334,9 @@ result = await ( ### Ephemeral Branches -Ephemeral branches are temporary branches that are stored in a separate namespace. They're useful for preview environments, temporary workspaces, or short-lived feature branches that don't need to be permanent. +Ephemeral branches are temporary branches that are stored in a separate +namespace. They're useful for preview environments, temporary workspaces, or +short-lived feature branches that don't need to be permanent. 
**Creating an ephemeral branch:** @@ -388,9 +407,12 @@ print(result["target_branch"]) # "feature/awesome-change" - Ephemeral branches are stored separately from regular branches - Use `ephemeral=True` when creating commits, reading files, or listing files -- Use `ephemeral_base=True` when branching off another ephemeral branch (requires `base_branch`) -- Promote an ephemeral branch with `repo.promote_ephemeral_branch()`; omit `target_branch` to keep the same name -- Ephemeral branches are ideal for temporary previews, CI/CD environments, or experiments +- Use `ephemeral_base=True` when branching off another ephemeral branch + (requires `base_branch`) +- Promote an ephemeral branch with `repo.promote_ephemeral_branch()`; omit + `target_branch` to keep the same name +- Ephemeral branches are ideal for temporary previews, CI/CD environments, or + experiments ### Streaming Large Files @@ -440,9 +462,11 @@ commits = await repo.list_commits() **How it works:** -1. When you create a repo with `base_repo`, Pierre links it to the specified GitHub repository +1. When you create a repo with `base_repo`, Pierre links it to the specified + GitHub repository 2. The `pull_upstream()` method fetches the latest changes from GitHub -3. You can then use all Pierre SDK features (diffs, commits, file access) on the synced content +3. You can then use all Pierre SDK features (diffs, commits, file access) on the + synced content 4. The provider is automatically set to `"github"` when using `base_repo` ### Forking Repositories @@ -688,17 +712,20 @@ else: ## Authentication -The SDK uses JWT (JSON Web Tokens) for authentication. When you call `get_remote_url()`, it: +The SDK uses JWT (JSON Web Tokens) for authentication. When you call +`get_remote_url()`, it: 1. Creates a JWT with your name, repository ID, and requested permissions 2. Signs it with your private key (ES256, RS256, or EdDSA) 3. 
Embeds it in the Git remote URL as the password -The generated URLs are compatible with standard Git clients and include all necessary authentication. +The generated URLs are compatible with standard Git clients and include all +necessary authentication. ### Manual JWT Generation -For advanced use cases, you can generate JWTs manually using the `generate_jwt` helper: +For advanced use cases, you can generate JWTs manually using the `generate_jwt` +helper: ```python from pierre_storage import generate_jwt @@ -725,11 +752,13 @@ git_url = f"https://t:{token}@your-name.code.storage/your-repo-id.git" - `key_pem` (required): Private key in PEM format (PKCS8) - `issuer` (required): Token issuer (your customer name) - `repo_id` (required): Repository identifier -- `scopes` (optional): List of permission scopes. Defaults to `["git:write", "git:read"]` +- `scopes` (optional): List of permission scopes. Defaults to + `["git:write", "git:read"]` - Available scopes: `"git:read"`, `"git:write"`, `"repo:write"` - `ttl` (optional): Time-to-live in seconds. Defaults to 31536000 (1 year) -The function automatically detects the key type (RSA, EC, or EdDSA) and uses the appropriate signing algorithm (RS256, ES256, or EdDSA). +The function automatically detects the key type (RSA, EC, or EdDSA) and uses the +appropriate signing algorithm (RS256, ES256, or EdDSA). 
## Error Handling diff --git a/packages/git-storage-sdk-python/moon.yml b/packages/git-storage-sdk-python/moon.yml index ead6de44f..79a4372ae 100644 --- a/packages/git-storage-sdk-python/moon.yml +++ b/packages/git-storage-sdk-python/moon.yml @@ -22,7 +22,9 @@ tasks: - pyproject.toml test-coverage: - command: ./venv/bin/pytest --cov=pierre_storage --cov-report=term-missing --cov-report=html + command: + ./venv/bin/pytest --cov=pierre_storage --cov-report=term-missing + --cov-report=html deps: - ~:setup inputs: @@ -85,7 +87,9 @@ tasks: - ~:check-package clean: - command: rm -rf venv/ dist/ build/ *.egg-info htmlcov/ .coverage .pytest_cache/ .mypy_cache/ .ruff_cache/ + command: + rm -rf venv/ dist/ build/ *.egg-info htmlcov/ .coverage .pytest_cache/ + .mypy_cache/ .ruff_cache/ options: cache: false shell: true From daf34c5f1377216556d75b38bfd916ce08079d0f Mon Sep 17 00:00:00 2001 From: fat Date: Thu, 29 Jan 2026 20:37:06 -0800 Subject: [PATCH 3/4] kill slop --- .../git-storage-sdk-python/PROJECT_SUMMARY.md | 272 ------------------ 1 file changed, 272 deletions(-) delete mode 100644 packages/git-storage-sdk-python/PROJECT_SUMMARY.md diff --git a/packages/git-storage-sdk-python/PROJECT_SUMMARY.md b/packages/git-storage-sdk-python/PROJECT_SUMMARY.md deleted file mode 100644 index f5a1a1dbd..000000000 --- a/packages/git-storage-sdk-python/PROJECT_SUMMARY.md +++ /dev/null @@ -1,272 +0,0 @@ -# Pierre Git Storage Python SDK - Project Summary - -## ✅ Completed - Version 0.4.2 - -A fully-functional, production-ready Python SDK for Pierre Git Storage, -mirroring the TypeScript SDK functionality. 
- -### Latest Updates (v0.4.2) - -- ✅ One-shot diff commits via `create_commit_from_diff` -- ✅ Shared commit option normalization across builder/diff flows -- ✅ Expanded SDK documentation and quick-start examples -- ✅ Version bump to keep PyPI metadata current - -## 📊 Project Statistics - -- **Total Code**: ~2,935 lines -- **Core Modules**: 8 Python modules -- **Test Files**: 2 test modules -- **Unit Tests**: 26 tests (100% passing ✅) -- **Test Coverage**: 45% overall (93% for client.py, 83% for webhook.py) -- **Python Version**: 3.8+ support - -## 🗂️ Project Structure - -``` -packages/git-storage-sdk-python/ -├── pierre_storage/ # Main package -│ ├── __init__.py # Public API exports -│ ├── auth.py # JWT authentication (ES256/RS256/EdDSA) -│ ├── client.py # GitStorage main client (93% coverage) -│ ├── commit.py # CommitBuilder with 4MB streaming -│ ├── errors.py # ApiError & RefUpdateError -│ ├── repo.py # All repository operations -│ ├── types.py # TypedDict type definitions -│ ├── webhook.py # HMAC webhook validation (83% coverage) -│ └── py.typed # PEP 561 type marker -│ -├── tests/ # Test suite -│ ├── conftest.py # Shared fixtures -│ ├── test_client.py # Client & JWT tests (18 tests) -│ └── test_webhook.py # Webhook tests (8 tests) -│ -├── scripts/ -│ └── setup.sh # Moon setup script -│ -├── pyproject.toml # Modern Python packaging -├── moon.yml # Moon task configuration -├── README.md # Complete documentation -├── QUICKSTART.md # Getting started guide -├── CONTRIBUTING.md # Contribution guidelines -├── DEVELOPMENT.md # Technical architecture docs -└── LICENSE # MIT license -``` - -## 🎯 Key Features - -### Core Functionality - -- ✅ Repository creation and management -- ✅ JWT-based authentication (ES256, RS256, EdDSA) -- ✅ Public JWT helper for manual token generation -- ✅ File operations (get stream, list) -- ✅ Branch & commit listing with pagination -- ✅ Branch and commit diffs -- ✅ Pull from upstream -- ✅ Restore commits -- ✅ Webhook signature validation - 
-### Developer Experience - -- ✅ Full type hints throughout -- ✅ Async/await API -- ✅ Fluent commit builder API -- ✅ Streaming support for large files (4MB chunks) -- ✅ Comprehensive error handling -- ✅ Well-documented with docstrings -- ✅ Unit test coverage - -## 🧪 Testing - -### Running Tests - -```bash -# Using virtual environment -cd packages/git-storage-sdk-python -python3 -m venv venv -source venv/bin/activate -pip install -e ".[dev]" -pytest -v - -# Using Moon -moon run git-storage-sdk-python:setup -moon run git-storage-sdk-python:test -``` - -### Test Results - -``` -26 passed in 0.11s ✅ -Coverage: 45% overall -- client.py: 93% ⭐ -- webhook.py: 83% ⭐ -- types.py: 100% ⭐ -``` - -## 📦 Dependencies - -### Required (Runtime) - -- `httpx` - Async HTTP client with streaming -- `pyjwt` - JWT encoding/decoding -- `cryptography` - Key management -- `pydantic` - Data validation -- `typing-extensions` - Type hint backports (Python 3.8-3.9) - -### Development - -- `pytest` - Test framework -- `pytest-asyncio` - Async test support -- `pytest-cov` - Coverage reporting -- `mypy` - Type checking -- `ruff` - Fast linting and formatting - -## 🚀 Usage Examples - -### Basic Usage - -```python -from pierre_storage import GitStorage - -# Initialize client -storage = GitStorage({ - "name": "your-name", - "key": "your-private-key-pem", -}) - -# Create repository -repo = await storage.create_repo() - -# Create commit with streaming -result = await ( - repo.create_commit({ - "target_branch": "main", - "commit_message": "Initial commit", - "author": {"name": "Bot", "email": "bot@example.com"}, - }) - .add_file_from_string("README.md", "# My Project") - .add_file("data.bin", large_file_stream) - .send() -) - -print(f"Commit: {result['commit_sha']}") - -# Apply an existing diff without using the builder -diff_text = """\ ---- a/README.md -+++ b/README.md -@@ --Old line -+New line -""" - -result = await repo.create_commit_from_diff( - target_branch="main", - commit_message="Apply diff", 
- diff=diff_text, - author={"name": "Bot", "email": "bot@example.com"}, - base_branch="release", # optional -) -print(f"Diff commit: {result['commit_sha']}") -``` - -### Manual JWT Generation - -```python -from pierre_storage import generate_jwt - -# Generate JWT token directly -token = generate_jwt( - key_pem=private_key, - issuer="your-name", - repo_id="repo-id", - scopes=["git:write", "git:read"], - ttl=3600 -) - -# Use in Git URL -git_url = f"https://t:{token}@your-name.code.storage/repo-id.git" -``` - -## 🔧 Moon Tasks - -Available tasks in `moon.yml`: - -```bash -moon run :setup # Create venv and install deps -moon run :test # Run unit tests -moon run :test-coverage # Run tests with coverage report -moon run :typecheck # Run mypy type checking -moon run :lint # Run ruff linting -moon run :format # Check code formatting -moon run :format-write # Auto-format code -moon run :build # Build distributable package -moon run :clean # Clean all generated files -``` - -## 📝 Documentation - -- **README.md** - Complete API reference and usage examples -- **QUICKSTART.md** - Quick start guide for new users -- **CONTRIBUTING.md** - Development workflow and guidelines -- **DEVELOPMENT.md** - Architecture and technical details -- **PROJECT_SUMMARY.md** - This file - -## 🎨 Code Quality - -- **Type Safety**: Full mypy type hints -- **Linting**: Ruff configured for modern Python -- **Formatting**: Ruff formatter -- **Testing**: Comprehensive unit test suite -- **Documentation**: Docstrings on all public APIs - -## 🌟 Highlights - -1. **Feature Parity**: 100% feature parity with TypeScript SDK -2. **Pythonic API**: Follows Python conventions and best practices -3. **Async First**: All I/O operations are async for performance -4. **Streaming**: Large file support with 4MB chunking -5. **Type Safe**: Full type hints for IDE support -6. **Well Tested**: 26 unit tests covering core functionality -7. 
**Production Ready**: Error handling, validation, documentation - -## 📋 Future Enhancements - -Potential improvements (not required for v0.1.2): - -- [ ] Increase test coverage to 80%+ -- [ ] Add retry logic with exponential backoff -- [ ] Add progress callbacks for uploads -- [ ] Add caching layer for frequently accessed data -- [ ] Add batch operation optimizations -- [ ] Integration tests (optional) - -## 🎓 Comparison with TypeScript SDK - -| Feature | TypeScript | Python | Status | -| --------------------- | ---------- | ------ | -------- | -| Repository operations | ✅ | ✅ | Complete | -| JWT authentication | ✅ | ✅ | Complete | -| Commit builder | ✅ | ✅ | Complete | -| File streaming | ✅ | ✅ | Complete | -| Webhook validation | ✅ | ✅ | Complete | -| Error handling | ✅ | ✅ | Complete | -| Type definitions | ✅ | ✅ | Complete | -| Documentation | ✅ | ✅ | Complete | -| Unit tests | ✅ | ✅ | Complete | - -## ✨ Ready for Use - -The Python SDK is **production-ready** and can be: - -- ✅ Published to PyPI -- ✅ Used in production applications -- ✅ Integrated into existing Python projects -- ✅ Extended with additional features - -## 📞 Support - -- GitHub Issues: For bug reports and feature requests -- Documentation: See README.md for complete API reference -- Examples: See QUICKSTART.md for usage examples From b184be729b0880fc38197ffe85e51494516092a9 Mon Sep 17 00:00:00 2001 From: fat Date: Thu, 29 Jan 2026 20:40:54 -0800 Subject: [PATCH 4/4] run bun --- bun.lock | 146 +++++++++++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 136 insertions(+), 10 deletions(-) diff --git a/bun.lock b/bun.lock index 93de23f10..24073317d 100644 --- a/bun.lock +++ b/bun.lock @@ -176,6 +176,20 @@ "react-dom": "^18.3.1 || ^19.0.0", }, }, + "packages/git-storage-sdk-node": { + "name": "@pierre/storage", + "version": "0.9.3", + "dependencies": { + "jose": "^5.10.0", + "snakecase-keys": "^9.0.2", + "zod": "^3.23.8", + }, + "devDependencies": { + "tsup": "8.5.0", + "typescript": 
"5.8.3", + "vitest": "3.2.4", + }, + }, "packages/storage-elements": { "name": "@pierre/storage-elements", "version": "0.0.2", @@ -696,7 +710,7 @@ "@pierre/solid-diff-demo": ["@pierre/solid-diff-demo@workspace:apps/solid-diff-demo"], - "@pierre/storage": ["@pierre/storage@0.0.10", "", { "dependencies": { "jose": "^5.10.0", "snakecase-keys": "^9.0.2" } }, "sha512-fDFrsnRiNTq7lpY56MGE4vRjYAHfTFLsxN18wbnxBlOk1XB1pxzgMfx5DwiqRcXs8+Gt2lmeXvlYAa1gGR4c7w=="], + "@pierre/storage": ["@pierre/storage@workspace:packages/git-storage-sdk-node"], "@pierre/storage-elements": ["@pierre/storage-elements@workspace:packages/storage-elements"], @@ -998,8 +1012,12 @@ "@types/bun": ["@types/bun@1.3.2", "", { "dependencies": { "bun-types": "1.3.2" } }, "sha512-t15P7k5UIgHKkxwnMNkJbWlh/617rkDGEdSsDbu+qNHTaz9SKf7aC8fiIlUdD5RPpH6GEkP0cK7WlvmrEBRtWg=="], + "@types/chai": ["@types/chai@5.2.3", "", { "dependencies": { "@types/deep-eql": "*", "assertion-error": "^2.0.1" } }, "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA=="], + "@types/debug": ["@types/debug@4.1.12", "", { "dependencies": { "@types/ms": "*" } }, "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ=="], + "@types/deep-eql": ["@types/deep-eql@4.0.2", "", {}, "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw=="], + "@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="], "@types/estree-jsx": ["@types/estree-jsx@1.0.5", "", { "dependencies": { "@types/estree": "*" } }, "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg=="], @@ -1060,6 +1078,20 @@ "@vitejs/plugin-react": ["@vitejs/plugin-react@5.0.3", "", { "dependencies": { "@babel/core": "^7.28.4", "@babel/plugin-transform-react-jsx-self": "^7.27.1", "@babel/plugin-transform-react-jsx-source": "^7.27.1", 
"@rolldown/pluginutils": "1.0.0-beta.35", "@types/babel__core": "^7.20.5", "react-refresh": "^0.17.0" }, "peerDependencies": { "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" } }, "sha512-PFVHhosKkofGH0Yzrw1BipSedTH68BFF8ZWy1kfUpCtJcouXXY0+racG8sExw7hw0HoX36813ga5o3LTWZ4FUg=="], + "@vitest/expect": ["@vitest/expect@3.2.4", "", { "dependencies": { "@types/chai": "^5.2.2", "@vitest/spy": "3.2.4", "@vitest/utils": "3.2.4", "chai": "^5.2.0", "tinyrainbow": "^2.0.0" } }, "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig=="], + + "@vitest/mocker": ["@vitest/mocker@3.2.4", "", { "dependencies": { "@vitest/spy": "3.2.4", "estree-walker": "^3.0.3", "magic-string": "^0.30.17" }, "peerDependencies": { "msw": "^2.4.9", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "optionalPeers": ["msw", "vite"] }, "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ=="], + + "@vitest/pretty-format": ["@vitest/pretty-format@3.2.4", "", { "dependencies": { "tinyrainbow": "^2.0.0" } }, "sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA=="], + + "@vitest/runner": ["@vitest/runner@3.2.4", "", { "dependencies": { "@vitest/utils": "3.2.4", "pathe": "^2.0.3", "strip-literal": "^3.0.0" } }, "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ=="], + + "@vitest/snapshot": ["@vitest/snapshot@3.2.4", "", { "dependencies": { "@vitest/pretty-format": "3.2.4", "magic-string": "^0.30.17", "pathe": "^2.0.3" } }, "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ=="], + + "@vitest/spy": ["@vitest/spy@3.2.4", "", { "dependencies": { "tinyspy": "^4.0.3" } }, "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw=="], + + "@vitest/utils": ["@vitest/utils@3.2.4", "", { "dependencies": { "@vitest/pretty-format": "3.2.4", "loupe": "^3.1.4", "tinyrainbow": "^2.0.0" } 
}, "sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA=="], + "abbrev": ["abbrev@3.0.1", "", {}, "sha512-AO2ac6pjRB3SJmGJo+v5/aK6Omggp6fsLrs6wN9bd35ulu4cCwaAU9+7ZhXjeqHVkaHThLuzH0nZr0YpCDhygg=="], "abort-controller": ["abort-controller@3.0.0", "", { "dependencies": { "event-target-shim": "^5.0.0" } }, "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg=="], @@ -1090,6 +1122,8 @@ "ansis": ["ansis@4.2.0", "", {}, "sha512-HqZ5rWlFjGiV0tDm3UxxgNRqsOTniqoKZu0pIAfh7TZQMGuZK+hH0drySty0si0QXj1ieop4+SkSfPZBPPkHig=="], + "any-promise": ["any-promise@1.3.0", "", {}, "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A=="], + "anymatch": ["anymatch@3.1.3", "", { "dependencies": { "normalize-path": "^3.0.0", "picomatch": "^2.0.4" } }, "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw=="], "archiver": ["archiver@7.0.1", "", { "dependencies": { "archiver-utils": "^5.0.2", "async": "^3.2.4", "buffer-crc32": "^1.0.0", "readable-stream": "^4.0.0", "readdir-glob": "^1.1.2", "tar-stream": "^3.0.0", "zip-stream": "^6.0.1" } }, "sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ=="], @@ -1102,6 +1136,8 @@ "array-union": ["array-union@2.1.0", "", {}, "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw=="], + "assertion-error": ["assertion-error@2.0.1", "", {}, "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA=="], + "ast-kit": ["ast-kit@2.2.0", "", { "dependencies": { "@babel/parser": "^7.28.5", "pathe": "^2.0.3" } }, "sha512-m1Q/RaVOnTp9JxPX+F+Zn7IcLYMzM8kZofDImfsKZd8MbR+ikdOzTeztStWqfrqIxZnYWryyI9ePm3NGjnZgGw=="], "ast-types": ["ast-types@0.16.1", "", { "dependencies": { "tslib": "^2.0.1" } }, 
"sha512-6t10qk83GOG8p0vKmaCr8eiilZwO171AvbROMtvvNiwrTly62t+7XkA8RdIIVbpMhCASAsxgAzdRSwh6nw/5Dg=="], @@ -1160,6 +1196,8 @@ "bun-types": ["bun-types@1.3.2", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-i/Gln4tbzKNuxP70OWhJRZz1MRfvqExowP7U6JKoI8cntFrtxg7RJK3jvz7wQW54UuvNC8tbKHHri5fy74FVqg=="], + "bundle-require": ["bundle-require@5.1.0", "", { "dependencies": { "load-tsconfig": "^0.2.3" }, "peerDependencies": { "esbuild": ">=0.18" } }, "sha512-3WrrOuZiyaaZPWiEt4G3+IffISVC9HYlWueJEBWED4ZH4aIAC2PnkdnuRrR94M+w6yGWn4AglWtJtBI8YqvgoA=="], + "bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="], "c12": ["c12@3.3.2", "", { "dependencies": { "chokidar": "^4.0.3", "confbox": "^0.2.2", "defu": "^6.1.4", "dotenv": "^17.2.3", "exsolve": "^1.0.8", "giget": "^2.0.0", "jiti": "^2.6.1", "ohash": "^2.0.11", "pathe": "^2.0.3", "perfect-debounce": "^2.0.0", "pkg-types": "^2.3.0", "rc9": "^2.1.2" }, "peerDependencies": { "magicast": "*" }, "optionalPeers": ["magicast"] }, "sha512-QkikB2X5voO1okL3QsES0N690Sn/K9WokXqUsDQsWy5SnYb+psYQFGA10iy1bZHj3fjISKsI67Q90gruvWWM3A=="], @@ -1180,6 +1218,8 @@ "ccount": ["ccount@2.0.1", "", {}, "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg=="], + "chai": ["chai@5.3.3", "", { "dependencies": { "assertion-error": "^2.0.1", "check-error": "^2.1.1", "deep-eql": "^5.0.1", "loupe": "^3.1.0", "pathval": "^2.0.0" } }, "sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw=="], + "chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], "change-case": ["change-case@5.4.4", "", {}, "sha512-HRQyTk2/YPEkt9TnUPbOpr64Uw3KOicFWPVBb+xiHvd6eBx/qPr9xqfBFDT8P2vWsvvz4jbEkfDe71W3VyNu2w=="], @@ -1192,6 +1232,8 
@@ "character-reference-invalid": ["character-reference-invalid@2.0.1", "", {}, "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw=="], + "check-error": ["check-error@2.1.3", "", {}, "sha512-PAJdDJusoxnwm1VwW07VWwUN1sl7smmC3OKggvndJFadxxDRyFJBX/ggnu/KE4kQAB7a3Dp8f/YXC1FlUprWmA=="], + "chokidar": ["chokidar@4.0.3", "", { "dependencies": { "readdirp": "^4.0.1" } }, "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA=="], "chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="], @@ -1304,6 +1346,8 @@ "dedent": ["dedent@1.7.0", "", { "peerDependencies": { "babel-plugin-macros": "^3.1.0" }, "optionalPeers": ["babel-plugin-macros"] }, "sha512-HGFtf8yhuhGhqO07SV79tRp+br4MnbdjeVxotpn1QBl30pcLLCQjX5b2295ll0fv8RKDKsmWYrl05usHM9CewQ=="], + "deep-eql": ["deep-eql@5.0.2", "", {}, "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q=="], + "deep-is": ["deep-is@0.1.4", "", {}, "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="], "deepmerge": ["deepmerge@4.3.1", "", {}, "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A=="], @@ -1448,6 +1492,8 @@ "execa": ["execa@9.6.1", "", { "dependencies": { "@sindresorhus/merge-streams": "^4.0.0", "cross-spawn": "^7.0.6", "figures": "^6.1.0", "get-stream": "^9.0.0", "human-signals": "^8.0.1", "is-plain-obj": "^4.1.0", "is-stream": "^4.0.1", "npm-run-path": "^6.0.0", "pretty-ms": "^9.2.0", "signal-exit": "^4.1.0", "strip-final-newline": "^4.0.0", "yoctocolors": "^2.1.1" } }, "sha512-9Be3ZoN4LmYR90tUoVu2te2BsbzHfhJyfEiAVfz7N5/zv+jduIfLrV2xdQXOHbaD6KgpGdO9PRPM1Y4Q9QkPkA=="], + "expect-type": ["expect-type@1.3.0", "", {}, "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA=="], + "express": ["express@5.2.1", "", { 
"dependencies": { "accepts": "^2.0.0", "body-parser": "^2.2.1", "content-disposition": "^1.0.0", "content-type": "^1.0.5", "cookie": "^0.7.1", "cookie-signature": "^1.2.1", "debug": "^4.4.0", "depd": "^2.0.0", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "finalhandler": "^2.1.0", "fresh": "^2.0.0", "http-errors": "^2.0.0", "merge-descriptors": "^2.0.0", "mime-types": "^3.0.0", "on-finished": "^2.4.1", "once": "^1.4.0", "parseurl": "^1.3.3", "proxy-addr": "^2.0.7", "qs": "^6.14.0", "range-parser": "^1.2.1", "router": "^2.2.0", "send": "^1.1.0", "serve-static": "^2.2.0", "statuses": "^2.0.1", "type-is": "^2.0.1", "vary": "^1.1.2" } }, "sha512-hIS4idWWai69NezIdRt2xFVofaF4j+6INOpJlVOLDO8zXGpUVEVzIYk12UUi2JzjEzWL3IOAxcTubgz9Po0yXw=="], "express-rate-limit": ["express-rate-limit@7.5.1", "", { "peerDependencies": { "express": ">= 4.11" } }, "sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw=="], @@ -1492,6 +1538,8 @@ "find-up": ["find-up@5.0.0", "", { "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" } }, "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng=="], + "fix-dts-default-cjs-exports": ["fix-dts-default-cjs-exports@1.0.1", "", { "dependencies": { "magic-string": "^0.30.17", "mlly": "^1.7.4", "rollup": "^4.34.8" } }, "sha512-pVIECanWFC61Hzl2+oOCtoJ3F17kglZC/6N94eRWycFgBH35hHx0Li604ZIzhseh97mf2p0cv7vVrOZGoqhlEg=="], + "flat-cache": ["flat-cache@4.0.1", "", { "dependencies": { "flatted": "^3.2.9", "keyv": "^4.5.4" } }, "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw=="], "flatted": ["flatted@3.3.3", "", {}, "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg=="], @@ -1706,6 +1754,8 @@ "jose": ["jose@5.10.0", "", {}, "sha512-s+3Al/p9g32Iq+oqXxkW//7jk2Vig6FF1CFqzVXoTUXt2qz89YWbL+OwS17NFYEvxC35n0FKeGO2LGYSxeM2Gg=="], + "joycon": ["joycon@3.1.1", "", {}, 
"sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw=="], + "js-tokens": ["js-tokens@9.0.1", "", {}, "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ=="], "js-yaml": ["js-yaml@4.1.1", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA=="], @@ -1764,10 +1814,14 @@ "lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.2", "", { "os": "win32", "cpu": "x64" }, "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw=="], + "lilconfig": ["lilconfig@3.1.3", "", {}, "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw=="], + "lines-and-columns": ["lines-and-columns@1.2.4", "", {}, "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg=="], "listhen": ["listhen@1.9.0", "", { "dependencies": { "@parcel/watcher": "^2.4.1", "@parcel/watcher-wasm": "^2.4.1", "citty": "^0.1.6", "clipboardy": "^4.0.0", "consola": "^3.2.3", "crossws": ">=0.2.0 <0.4.0", "defu": "^6.1.4", "get-port-please": "^3.1.2", "h3": "^1.12.0", "http-shutdown": "^1.2.2", "jiti": "^2.1.2", "mlly": "^1.7.1", "node-forge": "^1.3.1", "pathe": "^1.1.2", "std-env": "^3.7.0", "ufo": "^1.5.4", "untun": "^0.1.3", "uqr": "^0.1.2" }, "bin": { "listen": "bin/listhen.mjs", "listhen": "bin/listhen.mjs" } }, "sha512-I8oW2+QL5KJo8zXNWX046M134WchxsXC7SawLPvRQpogCbkyQIaFxPE89A2HiwR7vAK2Dm2ERBAmyjTYGYEpBg=="], + "load-tsconfig": ["load-tsconfig@0.2.5", "", {}, "sha512-IXO6OCs9yg8tMKzfPZ1YmheJbZCiEsnBdcB03l0OcfK9prKnJb96siuHCr5Fl37/yo9DnKU+TLpxzTUspw9shg=="], + "local-pkg": ["local-pkg@1.1.2", "", { "dependencies": { "mlly": "^1.7.4", "pkg-types": "^2.3.0", "quansync": "^0.2.11" } }, "sha512-arhlxbFRmoQHl33a0Zkle/YWlmNwoyt6QNZEIJcqNbdrsix5Lvc4HyyI3EnwxTYlZYc32EbYrQ8SzEZ7dqgg9A=="], 
"locate-path": ["locate-path@6.0.0", "", { "dependencies": { "p-locate": "^5.0.0" } }, "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw=="], @@ -1782,12 +1836,16 @@ "lodash.merge": ["lodash.merge@4.6.2", "", {}, "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="], + "lodash.sortby": ["lodash.sortby@4.7.0", "", {}, "sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA=="], + "lodash.truncate": ["lodash.truncate@4.4.2", "", {}, "sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw=="], "log-symbols": ["log-symbols@6.0.0", "", { "dependencies": { "chalk": "^5.3.0", "is-unicode-supported": "^1.3.0" } }, "sha512-i24m8rpwhmPIS4zscNzK6MSEhk0DUWa/8iYQWxhffV8jkI4Phvs3F+quL5xvS0gdQR0FyTCMMH33Y78dDTzzIw=="], "longest-streak": ["longest-streak@3.1.0", "", {}, "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g=="], + "loupe": ["loupe@3.2.1", "", {}, "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ=="], + "lower-case": ["lower-case@2.0.2", "", { "dependencies": { "tslib": "^2.0.3" } }, "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg=="], "lru-cache": ["lru-cache@11.2.4", "", {}, "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg=="], @@ -1954,6 +2012,8 @@ "mute-stream": ["mute-stream@2.0.0", "", {}, "sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA=="], + "mz": ["mz@2.7.0", "", { "dependencies": { "any-promise": "^1.0.0", "object-assign": "^4.0.1", "thenify-all": "^1.0.0" } }, "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q=="], + "nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, 
"sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="], "natural-compare": ["natural-compare@1.4.0", "", {}, "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw=="], @@ -2068,6 +2128,8 @@ "pathe": ["pathe@1.1.2", "", {}, "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ=="], + "pathval": ["pathval@2.0.1", "", {}, "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ=="], + "perfect-debounce": ["perfect-debounce@2.0.0", "", {}, "sha512-fkEH/OBiKrqqI/yIgjR92lMfs2K8105zt/VT6+7eTjNwisrsh47CeIED9z58zI7DfKdH3uHAn25ziRZn3kgAow=="], "picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="], @@ -2076,6 +2138,8 @@ "pify": ["pify@2.3.0", "", {}, "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog=="], + "pirates": ["pirates@4.0.7", "", {}, "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA=="], + "pkce-challenge": ["pkce-challenge@5.0.1", "", {}, "sha512-wQ0b/W4Fr01qtpHlqSqspcj3EhBvimsdh0KlHhH8HRZnMsEa0ea2fTULOXOS9ccQr3om+GcGRk4e+isrZWV8qQ=="], "pkg-types": ["pkg-types@2.3.0", "", { "dependencies": { "confbox": "^0.2.2", "exsolve": "^1.0.7", "pathe": "^2.0.3" } }, "sha512-SIqCzDRg0s9npO5XQ3tNZioRY1uK06lA41ynBC1YmFTmnY6FjUjVt6s4LoADmwoig1qqD0oK8h1p/8mlMx8Oig=="], @@ -2086,6 +2150,8 @@ "postcss-import": ["postcss-import@16.1.1", "", { "dependencies": { "postcss-value-parser": "^4.0.0", "read-cache": "^1.0.0", "resolve": "^1.1.7" }, "peerDependencies": { "postcss": "^8.0.0" } }, "sha512-2xVS1NCZAfjtVdvXiyegxzJ447GyqCeEI5V7ApgQVOWnros1p5lGNovJNapwPpMombyFBfqDwt7AD3n2l0KOfQ=="], + "postcss-load-config": ["postcss-load-config@6.0.1", "", { "dependencies": { "lilconfig": "^3.1.1" }, "peerDependencies": { "jiti": ">=1.21.0", "postcss": ">=8.0.9", 
"tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["jiti", "postcss", "tsx", "yaml"] }, "sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g=="], + "postcss-resolve-nested-selector": ["postcss-resolve-nested-selector@0.1.6", "", {}, "sha512-0sglIs9Wmkzbr8lQwEyIzlDOOC9bGmfVKcJTaxv3vMmd3uo4o4DerC3En0bnmgceeql9BfC8hRkp7cg0fjdVqw=="], "postcss-safe-parser": ["postcss-safe-parser@6.0.0", "", { "peerDependencies": { "postcss": "^8.3.3" } }, "sha512-FARHN8pwH+WiS2OPCxJI8FuRJpTVnn6ZNFiqAM2aeW2LwTHWWmWgIyKC6cUo0L8aeKiF/14MNvnpls6R2PBeMQ=="], @@ -2270,6 +2336,8 @@ "side-channel-weakmap": ["side-channel-weakmap@1.0.2", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3", "side-channel-map": "^1.0.1" } }, "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A=="], + "siginfo": ["siginfo@2.0.0", "", {}, "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g=="], + "signal-exit": ["signal-exit@4.1.0", "", {}, "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="], "sisteransi": ["sisteransi@1.0.5", "", {}, "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg=="], @@ -2292,7 +2360,7 @@ "sonner": ["sonner@2.0.7", "", { "peerDependencies": { "react": "^18.0.0 || ^19.0.0 || ^19.0.0-rc", "react-dom": "^18.0.0 || ^19.0.0 || ^19.0.0-rc" } }, "sha512-W6ZN4p58k8aDKA4XPcx2hpIQXBRAgyiWVkYhT7CvK6D3iAu7xjvVyhQHg2/iaKJZ1XVJ4r7XuwGL+WGEK37i9w=="], - "source-map": ["source-map@0.7.6", "", {}, "sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ=="], + "source-map": ["source-map@0.8.0-beta.0", "", { "dependencies": { "whatwg-url": "^7.0.0" } }, "sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA=="], "source-map-js": 
["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="], @@ -2300,6 +2368,8 @@ "space-separated-tokens": ["space-separated-tokens@2.0.2", "", {}, "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q=="], + "stackback": ["stackback@0.0.2", "", {}, "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw=="], + "stackframe": ["stackframe@1.3.4", "", {}, "sha512-oeVtt7eWQS+Na6F//S4kJ2K2VbRlS9D43mAlMyVpVWovy9o+jfgH8O9agzANzaiLjclA0oYzUXEM4PurhSUChw=="], "standard-as-callback": ["standard-as-callback@2.1.0", "", {}, "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A=="], @@ -2350,6 +2420,8 @@ "stylelint-prettier": ["stylelint-prettier@5.0.3", "", { "dependencies": { "prettier-linter-helpers": "^1.0.0" }, "peerDependencies": { "prettier": ">=3.0.0", "stylelint": ">=16.0.0" } }, "sha512-B6V0oa35ekRrKZlf+6+jA+i50C4GXJ7X1PPmoCqSUoXN6BrNF6NhqqhanvkLjqw2qgvrS0wjdpeC+Tn06KN3jw=="], + "sucrase": ["sucrase@3.35.1", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.2", "commander": "^4.0.0", "lines-and-columns": "^1.1.6", "mz": "^2.7.0", "pirates": "^4.0.1", "tinyglobby": "^0.2.11", "ts-interface-checker": "^0.1.9" }, "bin": { "sucrase": "bin/sucrase", "sucrase-node": "bin/sucrase-node" } }, "sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw=="], + "supports-color": ["supports-color@8.1.1", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q=="], "supports-hyperlinks": ["supports-hyperlinks@3.2.0", "", { "dependencies": { "has-flag": "^4.0.0", "supports-color": "^7.0.0" } }, "sha512-zFObLMyZeEwzAoKCyu1B91U79K2t7ApXuQfo8OuxwXLDgcKxuwM+YvcbIhm6QWqz7mHUH1TVytR1PwVVjEuMig=="], @@ -2384,12 +2456,24 @@ "text-decoder": ["text-decoder@1.2.3", "", { 
"dependencies": { "b4a": "^1.6.4" } }, "sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA=="], + "thenify": ["thenify@3.3.1", "", { "dependencies": { "any-promise": "^1.0.0" } }, "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw=="], + + "thenify-all": ["thenify-all@1.6.0", "", { "dependencies": { "thenify": ">= 3.1.0 < 4" } }, "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA=="], + "tiny-invariant": ["tiny-invariant@1.3.3", "", {}, "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg=="], + "tinybench": ["tinybench@2.9.0", "", {}, "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg=="], + "tinyexec": ["tinyexec@1.0.2", "", {}, "sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg=="], "tinyglobby": ["tinyglobby@0.2.15", "", { "dependencies": { "fdir": "^6.5.0", "picomatch": "^4.0.3" } }, "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ=="], + "tinypool": ["tinypool@1.1.1", "", {}, "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg=="], + + "tinyrainbow": ["tinyrainbow@2.0.0", "", {}, "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw=="], + + "tinyspy": ["tinyspy@4.0.4", "", {}, "sha512-azl+t0z7pw/z958Gy9svOTuzqIk6xq+NSheJzn5MMWtWTFywIacg2wUlzKFGtt3cthx0r2SxMK0yzJOR0IES7Q=="], + "tldts": ["tldts@7.0.19", "", { "dependencies": { "tldts-core": "^7.0.19" }, "bin": { "tldts": "bin/cli.js" } }, "sha512-8PWx8tvC4jDB39BQw1m4x8y5MH1BcQ5xHeL2n7UVFulMPH/3Q0uiamahFJ3lXA0zO2SUyRXuVVbWSDmstlt9YA=="], "tldts-core": ["tldts-core@7.0.19", "", {}, "sha512-lJX2dEWx0SGH4O6p+7FPwYmJ/bu1JbcGJ8RLaG9b7liIgZ85itUVEPbMtWRVrde/0fnDPEPHW10ZsKW3kVsE9A=="], @@ -2400,7 +2484,7 @@ "tough-cookie": 
["tough-cookie@6.0.0", "", { "dependencies": { "tldts": "^7.0.5" } }, "sha512-kXuRi1mtaKMrsLUxz3sQYvVl37B0Ns6MzfrtV5DvJceE9bPyspOqk9xxv7XbZWcfLWbFmm997vl83qUWVJA64w=="], - "tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="], + "tr46": ["tr46@1.0.1", "", { "dependencies": { "punycode": "^2.1.0" } }, "sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA=="], "tree-kill": ["tree-kill@1.2.2", "", { "bin": { "tree-kill": "cli.js" } }, "sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A=="], @@ -2410,6 +2494,8 @@ "ts-api-utils": ["ts-api-utils@2.1.0", "", { "peerDependencies": { "typescript": ">=4.8.4" } }, "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ=="], + "ts-interface-checker": ["ts-interface-checker@0.1.13", "", {}, "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA=="], + "ts-morph": ["ts-morph@26.0.0", "", { "dependencies": { "@ts-morph/common": "~0.27.0", "code-block-writer": "^13.0.3" } }, "sha512-ztMO++owQnz8c/gIENcM9XfCEzgoGphTv+nKpYNM1bgsdOVC/jRZuEBf6N+mLLDNg68Kl+GgUZfOySaRiG1/Ug=="], "tsconfig-paths": ["tsconfig-paths@4.2.0", "", { "dependencies": { "json5": "^2.2.2", "minimist": "^1.2.6", "strip-bom": "^3.0.0" } }, "sha512-NoZ4roiN7LnbKn9QqE1amc9DJfzvZXxF4xDavcOWt1BPkdx+m+0gJuPM+S0vCe7zTJMYUP0R8pO2XMr+Y8oLIg=="], @@ -2418,11 +2504,13 @@ "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + "tsup": ["tsup@8.5.0", "", { "dependencies": { "bundle-require": "^5.1.0", "cac": "^6.7.14", "chokidar": "^4.0.3", "consola": "^3.4.0", "debug": "^4.4.0", "esbuild": "^0.25.0", "fix-dts-default-cjs-exports": "^1.0.0", "joycon": "^3.1.1", "picocolors": "^1.1.1", "postcss-load-config": "^6.0.1", "resolve-from": "^5.0.0", "rollup": "^4.34.8", 
"source-map": "0.8.0-beta.0", "sucrase": "^3.35.0", "tinyexec": "^0.3.2", "tinyglobby": "^0.2.11", "tree-kill": "^1.2.2" }, "peerDependencies": { "@microsoft/api-extractor": "^7.36.0", "@swc/core": "^1", "postcss": "^8.4.12", "typescript": ">=4.5.0" }, "optionalPeers": ["@microsoft/api-extractor", "@swc/core", "postcss", "typescript"], "bin": { "tsup": "dist/cli-default.js", "tsup-node": "dist/cli-node.js" } }, "sha512-VmBp77lWNQq6PfuMqCHD3xWl22vEoWsKajkF8t+yMBawlUS8JzEI+vOVMeuNZIuMML8qXRizFKi9oD5glKQVcQ=="], + "tw-animate-css": ["tw-animate-css@1.4.0", "", {}, "sha512-7bziOlRqH0hJx80h/3mbicLW7o8qLsH5+RaLR2t+OHM3D0JlWGODQKQ4cxbK7WlvmUxpcj6Kgu6EKqjrGFe3QQ=="], "type-check": ["type-check@0.4.0", "", { "dependencies": { "prelude-ls": "^1.2.1" } }, "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew=="], - "type-fest": ["type-fest@5.3.1", "", { "dependencies": { "tagged-tag": "^1.0.0" } }, "sha512-VCn+LMHbd4t6sF3wfU/+HKT63C9OoyrSIf4b+vtWHpt2U7/4InZG467YDNMFMR70DdHjAdpPWmw2lzRdg0Xqqg=="], + "type-fest": ["type-fest@4.41.0", "", {}, "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA=="], "type-is": ["type-is@2.0.1", "", { "dependencies": { "content-type": "^1.0.5", "media-typer": "^1.1.0", "mime-types": "^3.0.0" } }, "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw=="], @@ -2514,20 +2602,26 @@ "vite": ["rolldown-vite@7.1.12", "", { "dependencies": { "@oxc-project/runtime": "0.90.0", "fdir": "^6.5.0", "lightningcss": "^1.30.1", "picomatch": "^4.0.3", "postcss": "^8.5.6", "rolldown": "1.0.0-beta.39", "tinyglobby": "^0.2.15" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^20.19.0 || >=22.12.0", "esbuild": "^0.25.0", "jiti": ">=1.21.0", "less": "^4.0.0", "sass": "^1.70.0", "sass-embedded": "^1.70.0", "stylus": ">=0.54.8", "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, 
"optionalPeers": ["@types/node", "esbuild", "jiti", "less", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-JREtUS+Lpa3s5Ha3ajf2F4LMS4BFxlVjpGz0k0ZR8rV3ZO3tzk5hukqyi9yRBcrvnTUg/BEForyCDahALFYAZA=="], + "vite-node": ["vite-node@3.2.4", "", { "dependencies": { "cac": "^6.7.14", "debug": "^4.4.1", "es-module-lexer": "^1.7.0", "pathe": "^2.0.3", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "bin": { "vite-node": "vite-node.mjs" } }, "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg=="], + "vite-plugin-solid": ["vite-plugin-solid@2.11.10", "", { "dependencies": { "@babel/core": "^7.23.3", "@types/babel__core": "^7.20.4", "babel-preset-solid": "^1.8.4", "merge-anything": "^5.1.7", "solid-refresh": "^0.6.3", "vitefu": "^1.0.4" }, "peerDependencies": { "@testing-library/jest-dom": "^5.16.6 || ^5.17.0 || ^6.*", "solid-js": "^1.7.2", "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" }, "optionalPeers": ["@testing-library/jest-dom"] }, "sha512-Yr1dQybmtDtDAHkii6hXuc1oVH9CPcS/Zb2jN/P36qqcrkNnVPsMTzQ06jyzFPFjj3U1IYKMVt/9ZqcwGCEbjw=="], "vitefu": ["vitefu@1.1.1", "", { "peerDependencies": { "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0-beta.0" }, "optionalPeers": ["vite"] }, "sha512-B/Fegf3i8zh0yFbpzZ21amWzHmuNlLlmJT6n7bu5e+pCHUKQIfXSYokrqOBGEMMe9UG2sostKQF9mml/vYaWJQ=="], + "vitest": ["vitest@3.2.4", "", { "dependencies": { "@types/chai": "^5.2.2", "@vitest/expect": "3.2.4", "@vitest/mocker": "3.2.4", "@vitest/pretty-format": "^3.2.4", "@vitest/runner": "3.2.4", "@vitest/snapshot": "3.2.4", "@vitest/spy": "3.2.4", "@vitest/utils": "3.2.4", "chai": "^5.2.0", "debug": "^4.4.1", "expect-type": "^1.2.1", "magic-string": "^0.30.17", "pathe": "^2.0.3", "picomatch": "^4.0.2", "std-env": "^3.9.0", "tinybench": "^2.9.0", "tinyexec": "^0.3.2", "tinyglobby": "^0.2.14", "tinypool": "^1.1.1", "tinyrainbow": "^2.0.0", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", 
"vite-node": "3.2.4", "why-is-node-running": "^2.3.0" }, "peerDependencies": { "@edge-runtime/vm": "*", "@types/debug": "^4.1.12", "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", "@vitest/browser": "3.2.4", "@vitest/ui": "3.2.4", "happy-dom": "*", "jsdom": "*" }, "optionalPeers": ["@edge-runtime/vm", "@types/debug", "@types/node", "@vitest/browser", "@vitest/ui", "happy-dom", "jsdom"], "bin": { "vitest": "vitest.mjs" } }, "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A=="], + "web-streams-polyfill": ["web-streams-polyfill@3.3.3", "", {}, "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw=="], - "webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="], + "webidl-conversions": ["webidl-conversions@4.0.2", "", {}, "sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg=="], "webpack-virtual-modules": ["webpack-virtual-modules@0.6.2", "", {}, "sha512-66/V2i5hQanC51vBQKPH4aI8NMAcBW59FVBs+rC7eGHupMyfn34q7rZIE+ETlJ+XTevqfUhVVBgSUNSW2flEUQ=="], - "whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="], + "whatwg-url": ["whatwg-url@7.1.0", "", { "dependencies": { "lodash.sortby": "^4.7.0", "tr46": "^1.0.1", "webidl-conversions": "^4.0.2" } }, "sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg=="], "which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], + "why-is-node-running": ["why-is-node-running@2.3.0", "", { "dependencies": { "siginfo": "^2.0.0", "stackback": "0.0.2" }, "bin": { 
"why-is-node-running": "cli.js" } }, "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w=="], + "widest-line": ["widest-line@5.0.0", "", { "dependencies": { "string-width": "^7.0.0" } }, "sha512-c9bZp7b5YtRj2wOe6dlj32MK+Bx/M/d+9VB2SHM1OtsUHR0aV0tdP6DWh/iMt0kWi1t5g1Iudu6hQRNd1A4PVA=="], "word-wrap": ["word-wrap@1.2.5", "", {}, "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA=="], @@ -2604,6 +2698,8 @@ "@mapbox/node-pre-gyp/semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="], + "@mdx-js/mdx/source-map": ["source-map@0.7.6", "", {}, "sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ=="], + "@modelcontextprotocol/sdk/ajv": ["ajv@8.17.1", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2" } }, "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g=="], "@modelcontextprotocol/sdk/jose": ["jose@6.1.3", "", {}, "sha512-0TpaTfihd4QMNwrz/ob2Bp7X04yuxJkjRGi4aKmOqwhov54i6u79oCv7T+C7lo70MKH6BesI3vscD1yb/yzKXQ=="], @@ -2614,6 +2710,10 @@ "@pierre/solid-diff-demo/vite": ["vite@6.4.1", "", { "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.4.4", "picomatch": "^4.0.2", "postcss": "^8.5.3", "rollup": "^4.34.9", "tinyglobby": "^0.2.13" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", "jiti": ">=1.21.0", "less": "*", "lightningcss": "^1.21.0", "sass": "*", "sass-embedded": "*", "stylus": "*", "sugarss": "*", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } 
}, "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g=="], + "@pierre/storage/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], + + "@pierre/storage-elements-next/@pierre/storage": ["@pierre/storage@0.0.10", "", { "dependencies": { "jose": "^5.10.0", "snakecase-keys": "^9.0.2" } }, "sha512-fDFrsnRiNTq7lpY56MGE4vRjYAHfTFLsxN18wbnxBlOk1XB1pxzgMfx5DwiqRcXs8+Gt2lmeXvlYAa1gGR4c7w=="], + "@poppinss/dumper/supports-color": ["supports-color@10.2.2", "", {}, "sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g=="], "@rollup/plugin-commonjs/estree-walker": ["estree-walker@2.0.2", "", {}, "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w=="], @@ -2662,6 +2762,10 @@ "@vinxi/listhen/jiti": ["jiti@1.21.7", "", { "bin": { "jiti": "bin/jiti.js" } }, "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A=="], + "@vitest/runner/pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="], + + "@vitest/snapshot/pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="], + "ajv-formats/ajv": ["ajv@8.17.1", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2" } }, "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g=="], "anymatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="], @@ -2678,8 +2782,6 @@ "boxen/string-width": ["string-width@7.2.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, 
"sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="], - "boxen/type-fest": ["type-fest@4.41.0", "", {}, "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA=="], - "brace-expansion/balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="], "c12/pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="], @@ -2698,6 +2800,10 @@ "dax-sh/undici-types": ["undici-types@5.28.4", "", {}, "sha512-3OeMF5Lyowe8VW0skf5qaIE7Or3yS9LS7fvMUI0gg4YxpIBVg0L8BxCmROw2CcYhSkpR68Epz7CGc8MPj94Uww=="], + "dot-prop/type-fest": ["type-fest@5.3.1", "", { "dependencies": { "tagged-tag": "^1.0.0" } }, "sha512-VCn+LMHbd4t6sF3wfU/+HKT63C9OoyrSIf4b+vtWHpt2U7/4InZG467YDNMFMR70DdHjAdpPWmw2lzRdg0Xqqg=="], + + "estree-util-to-js/source-map": ["source-map@0.7.6", "", {}, "sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ=="], + "express/cookie": ["cookie@0.7.2", "", {}, "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w=="], "express/send": ["send@1.2.0", "", { "dependencies": { "debug": "^4.3.5", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "fresh": "^2.0.0", "http-errors": "^2.0.0", "mime-types": "^3.0.1", "ms": "^2.1.3", "on-finished": "^2.4.1", "range-parser": "^1.2.1", "statuses": "^2.0.1" } }, "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw=="], @@ -2740,6 +2846,8 @@ "mlly/pkg-types": ["pkg-types@1.3.1", "", { "dependencies": { "confbox": "^0.1.8", "mlly": "^1.7.4", "pathe": "^2.0.1" } }, "sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ=="], + "msw/type-fest": ["type-fest@5.3.1", "", { "dependencies": { "tagged-tag": "^1.0.0" } }, 
"sha512-VCn+LMHbd4t6sF3wfU/+HKT63C9OoyrSIf4b+vtWHpt2U7/4InZG467YDNMFMR70DdHjAdpPWmw2lzRdg0Xqqg=="], + "next/postcss": ["postcss@8.4.31", "", { "dependencies": { "nanoid": "^3.3.6", "picocolors": "^1.0.0", "source-map-js": "^1.0.2" } }, "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ=="], "nitropack/escape-string-regexp": ["escape-string-regexp@5.0.0", "", {}, "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw=="], @@ -2756,6 +2864,8 @@ "nitropack/serve-static": ["serve-static@2.2.0", "", { "dependencies": { "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "parseurl": "^1.3.3", "send": "^1.2.0" } }, "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ=="], + "nitropack/source-map": ["source-map@0.7.6", "", {}, "sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ=="], + "nitropack/unenv": ["unenv@2.0.0-rc.24", "", { "dependencies": { "pathe": "^2.0.3" } }, "sha512-i7qRCmY42zmCwnYlh9H2SvLEypEFGye5iRmEMKjcGi7zk9UquigRjFtTLz0TYqr0ZGLZhaMHl/foy1bZR+Cwlw=="], "npm-run-path/path-key": ["path-key@4.0.0", "", {}, "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ=="], @@ -2788,6 +2898,8 @@ "rolldown/@rolldown/pluginutils": ["@rolldown/pluginutils@1.0.0-beta.45", "", {}, "sha512-Le9ulGCrD8ggInzWw/k2J8QcbPz7eGIOWqfJ2L+1R0Opm7n6J37s2hiDWlh6LJN0Lk9L5sUzMvRHKW7UxBZsQA=="], + "rollup-plugin-visualizer/source-map": ["source-map@0.7.6", "", {}, "sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ=="], + "router/path-to-regexp": ["path-to-regexp@8.3.0", "", {}, "sha512-7jdwVIRtsP8MYpdXSwOS0YdD0Du+qOoF/AEPIt88PcCFrZCzx41oxku1jD88hZBwbNUIEfpqvuhjFaMAqMTWnA=="], "send/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], 
@@ -2812,8 +2924,6 @@ "sharp/semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="], - "snakecase-keys/type-fest": ["type-fest@4.41.0", "", {}, "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA=="], - "solid-js/seroval": ["seroval@1.3.2", "", {}, "sha512-RbcPH1n5cfwKrru7v7+zrZvjLurgHhGyso3HTyGtRivGWgYjbOmGuivCQaORNELjNONoK35nj28EoWul9sb1zQ=="], "solid-js/seroval-plugins": ["seroval-plugins@1.3.3", "", { "peerDependencies": { "seroval": "^1.0" } }, "sha512-16OL3NnUBw8JG1jBLUoZJsLnQq0n5Ua6aHalhJK4fMQkz1lqR7Osz1sA30trBtd9VUDc2NgkuRCn8+/pBwqZ+w=="], @@ -2830,6 +2940,8 @@ "stylelint/postcss-safe-parser": ["postcss-safe-parser@7.0.1", "", { "peerDependencies": { "postcss": "^8.4.31" } }, "sha512-0AioNCJZ2DPYz5ABT6bddIqlhgwhpHZ/l65YAYo0BCIn0xiDpsnTHz0gnoTGk0OXZW0JRs+cDwL8u/teRdz+8A=="], + "sucrase/commander": ["commander@4.1.1", "", {}, "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA=="], + "supports-hyperlinks/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], "table/ajv": ["ajv@8.17.1", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2" } }, "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g=="], @@ -2840,6 +2952,8 @@ "tsdown/semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="], + "tsup/tinyexec": ["tinyexec@0.3.2", "", {}, "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA=="], + "unenv/mime": ["mime@3.0.0", "", { "bin": { "mime": 
"cli.js" } }, "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A=="], "unimport/escape-string-regexp": ["escape-string-regexp@5.0.0", "", {}, "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw=="], @@ -2868,6 +2982,12 @@ "vite/rolldown": ["rolldown@1.0.0-beta.39", "", { "dependencies": { "@oxc-project/types": "=0.90.0", "@rolldown/pluginutils": "1.0.0-beta.39", "ansis": "^4.0.0" }, "optionalDependencies": { "@rolldown/binding-android-arm64": "1.0.0-beta.39", "@rolldown/binding-darwin-arm64": "1.0.0-beta.39", "@rolldown/binding-darwin-x64": "1.0.0-beta.39", "@rolldown/binding-freebsd-x64": "1.0.0-beta.39", "@rolldown/binding-linux-arm-gnueabihf": "1.0.0-beta.39", "@rolldown/binding-linux-arm64-gnu": "1.0.0-beta.39", "@rolldown/binding-linux-arm64-musl": "1.0.0-beta.39", "@rolldown/binding-linux-x64-gnu": "1.0.0-beta.39", "@rolldown/binding-linux-x64-musl": "1.0.0-beta.39", "@rolldown/binding-openharmony-arm64": "1.0.0-beta.39", "@rolldown/binding-wasm32-wasi": "1.0.0-beta.39", "@rolldown/binding-win32-arm64-msvc": "1.0.0-beta.39", "@rolldown/binding-win32-ia32-msvc": "1.0.0-beta.39", "@rolldown/binding-win32-x64-msvc": "1.0.0-beta.39" }, "bin": { "rolldown": "bin/cli.mjs" } }, "sha512-05bTT0CJU9dvCRC0Uc4zwB79W5N9MV9OG/Inyx8KNE2pSrrApJoWxEEArW6rmjx113HIx5IreCoTjzLfgvXTdg=="], + "vite-node/pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="], + + "vitest/pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="], + + "vitest/tinyexec": ["tinyexec@0.3.2", "", {}, "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA=="], + "widest-line/string-width": ["string-width@7.2.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, 
"sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="], "wrap-ansi/ansi-styles": ["ansi-styles@6.2.3", "", {}, "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg=="], @@ -2898,6 +3018,8 @@ "@isaacs/cliui/wrap-ansi/ansi-styles": ["ansi-styles@6.2.3", "", {}, "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg=="], + "@mapbox/node-pre-gyp/node-fetch/whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="], + "@modelcontextprotocol/sdk/ajv/json-schema-traverse": ["json-schema-traverse@1.0.0", "", {}, "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug=="], "@svgr/plugin-svgo/svgo/commander": ["commander@7.2.0", "", {}, "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw=="], @@ -3070,6 +3192,10 @@ "wrap-ansi/strip-ansi/ansi-regex": ["ansi-regex@6.2.2", "", {}, "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg=="], + "@mapbox/node-pre-gyp/node-fetch/whatwg-url/tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="], + + "@mapbox/node-pre-gyp/node-fetch/whatwg-url/webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="], + "@svgr/plugin-svgo/svgo/css-tree/mdn-data": ["mdn-data@2.0.30", "", {}, "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA=="], "@trivago/prettier-plugin-sort-imports/minimatch/brace-expansion/balanced-match": ["balanced-match@1.0.2", "", {}, 
"sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="],